From 507fc5cf256bd4d1749a6109ef45c8462dd82185 Mon Sep 17 00:00:00 2001 From: Matthew Hughes <34972397+matthewhughes934@users.noreply.github.com> Date: Thu, 27 Oct 2022 19:25:34 +0100 Subject: [PATCH 001/292] Add type inference for `dict.keys` membership (#13372) Closes #13360 --- mypy/checker.py | 2 ++ test-data/unit/pythoneval.test | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/mypy/checker.py b/mypy/checker.py index 31177795e5e5..d66bf764b517 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -6656,6 +6656,8 @@ def builtin_item_type(tp: Type) -> Type | None: "builtins.dict", "builtins.set", "builtins.frozenset", + "_collections_abc.dict_keys", + "typing.KeysView", ]: if not tp.args: # TODO: fix tuple in lib-stub/builtins.pyi (it should be generic). diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 59ab586b17e6..692f62bf6454 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1636,3 +1636,27 @@ foo("") foo(list("")) foo(list((list(""), ""))) [out] + +[case testNarrowTypeForDictKeys] +# flags: --strict-optional +from typing import Dict, KeysView, Optional + +d: Dict[str, int] +key: Optional[str] +if key in d.keys(): + reveal_type(key) +else: + reveal_type(key) + +kv: KeysView[str] +k: Optional[str] +if k in kv: + reveal_type(k) +else: + reveal_type(k) + +[out] +_testNarrowTypeForDictKeys.py:7: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:9: note: Revealed type is "Union[builtins.str, None]" +_testNarrowTypeForDictKeys.py:14: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, None]" From 2d70ac0b33b448d5ef51c0856571068dd0754af6 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 27 Oct 2022 15:04:28 -0700 Subject: [PATCH 002/292] Add hidden options to disable bytes promotion (#13952) It might be useful to run mypy_primer without promotions in typeshed. 
This would give us more confidence in changes stemming from https://github.com/python/typeshed/issues/9001 --- mypy/main.py | 6 ++++++ mypy/options.py | 16 +++++++++++++--- mypy/semanal_classprop.py | 4 ++++ test-data/unit/check-flags.test | 15 +++++++++++++++ 4 files changed, 38 insertions(+), 3 deletions(-) diff --git a/mypy/main.py b/mypy/main.py index 360a8ed1df17..405596c20991 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -1121,6 +1121,12 @@ def add_invertible_flag( parser.add_argument( "--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS ) + parser.add_argument( + "--disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS + ) + parser.add_argument( + "--disable-memoryview-promotion", action="store_true", help=argparse.SUPPRESS + ) # options specifying code to check code_group = parser.add_argument_group( diff --git a/mypy/options.py b/mypy/options.py index b89ad97708c1..3a08ff9455ee 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -56,9 +56,16 @@ class BuildType: "warn_unused_ignores", } -OPTIONS_AFFECTING_CACHE: Final = (PER_MODULE_OPTIONS | {"platform", "bazel", "plugins"}) - { - "debug_cache" -} +OPTIONS_AFFECTING_CACHE: Final = ( + PER_MODULE_OPTIONS + | { + "platform", + "bazel", + "plugins", + "disable_bytearray_promotion", + "disable_memoryview_promotion", + } +) - {"debug_cache"} # Features that are currently incomplete/experimental TYPE_VAR_TUPLE: Final = "TypeVarTuple" @@ -329,6 +336,9 @@ def __init__(self) -> None: # Deprecated reverse version of the above, do not use. self.enable_recursive_aliases = False + self.disable_bytearray_promotion = False + self.disable_memoryview_promotion = False + # To avoid breaking plugin compatibility, keep providing new_semantic_analyzer @property def new_semantic_analyzer(self) -> bool: diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index b5a702592144..5d21babcc597 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -165,6 +165,10 @@ def add_type_promotion( if not promote_targets: if defn.fullname in TYPE_PROMOTIONS: target_sym = module_names.get(TYPE_PROMOTIONS[defn.fullname]) + if defn.fullname == "builtins.bytearray" and options.disable_bytearray_promotion: + target_sym = None + elif defn.fullname == "builtins.memoryview" and options.disable_memoryview_promotion: + target_sym = None # With test stubs, the target may not exist. if target_sym: target_info = target_sym.node diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 03c2d1f38b82..5a075dd6efef 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -2128,3 +2128,18 @@ Ts = TypeVarTuple("Ts") # E: "TypeVarTuple" support is experimental, use --enab from typing_extensions import TypeVarTuple Ts = TypeVarTuple("Ts") # OK [builtins fixtures/tuple.pyi] + + +[case testDisableBytearrayPromotion] +# flags: --disable-bytearray-promotion +def f(x: bytes) -> None: ... +f(bytearray(b"asdf")) # E: Argument 1 to "f" has incompatible type "bytearray"; expected "bytes" +f(memoryview(b"asdf")) +[builtins fixtures/primitives.pyi] + +[case testDisableMemoryviewPromotion] +# flags: --disable-memoryview-promotion +def f(x: bytes) -> None: ... 
+f(bytearray(b"asdf")) +f(memoryview(b"asdf")) # E: Argument 1 to "f" has incompatible type "memoryview"; expected "bytes" +[builtins fixtures/primitives.pyi] From 5319fa34a8004c1568bb6f032a07b8b14cc95bed Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 29 Oct 2022 12:47:21 -0700 Subject: [PATCH 003/292] Revert sum literal integer change (#13961) This is allegedly causing large performance problems, see 13821 typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing to undo. Patching this in typeshed also feels weird, since there's a more general soundness issue. If a typevar has a bound or constraint, we might not want to solve it to a Literal. If we can confirm the performance regression or fix the unsoundness within mypy, I might pursue upstreaming this in typeshed. (Reminder: add this to the sync_typeshed script once merged) --- mypy/typeshed/stdlib/builtins.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index ed60a7c018e7..d3b3f677b370 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1569,11 +1569,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], start: int = ...) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], __start: int = ...) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... 
From 40d04a5af0b658ae5ef6181c7f3d89a984fe3547 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 29 Oct 2022 13:05:49 -0700 Subject: [PATCH 004/292] Revert sum literal changes on an ongoing basis (#13962) Makes sure we continue to cherry pick https://github.com/python/mypy/pull/13961 --- misc/sync-typeshed.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index c6856f86744a..743a2934e0c3 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -184,8 +184,10 @@ def main() -> None: subprocess.run(["git", "commit", "-m", message], check=True) print("Created typeshed sync commit.") - # Currently just LiteralString reverts - commits_to_cherry_pick = ["780534b13722b7b0422178c049a1cbbf4ea4255b"] + commits_to_cherry_pick = [ + "780534b13722b7b0422178c049a1cbbf4ea4255b", # LiteralString reverts + "5319fa34a8004c1568bb6f032a07b8b14cc95bed", # sum reverts + ] for commit in commits_to_cherry_pick: subprocess.run(["git", "cherry-pick", commit], check=True) print(f"Cherry-picked {commit}.") From 41c160231bb53cb9895044506c2b08aba692922b Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 31 Oct 2022 02:35:52 +0300 Subject: [PATCH 005/292] Warn on invalid `*args` and `**kwargs` with `ParamSpec` (#13892) Closes #13890 --- mypy/semanal.py | 61 ++++++++++ .../unit/check-parameter-specification.test | 113 ++++++++++++++++++ 2 files changed, 174 insertions(+) diff --git a/mypy/semanal.py b/mypy/semanal.py index b37c9b2a5c77..b8f708b22a92 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -69,6 +69,8 @@ from mypy.nodes import ( ARG_NAMED, ARG_POS, + ARG_STAR, + ARG_STAR2, CONTRAVARIANT, COVARIANT, GDEF, @@ -843,6 +845,7 @@ def analyze_func_def(self, defn: FuncDef) -> None: defn.type = result self.add_type_alias_deps(analyzer.aliases_used) self.check_function_signature(defn) + self.check_paramspec_definition(defn) if isinstance(defn, FuncDef): assert isinstance(defn.type, CallableType) defn.type = set_callable_name(defn.type, defn) @@ -1282,6 +1285,64 @@ def check_function_signature(self, fdef: FuncItem) -> None: elif len(sig.arg_types) > len(fdef.arguments): self.fail("Type signature has too many arguments", fdef, blocker=True) + def check_paramspec_definition(self, defn: FuncDef) -> None: + func = defn.type + assert isinstance(func, CallableType) + + if not any(isinstance(var, ParamSpecType) for var in func.variables): + return # Function does not have param spec variables + + args = func.var_arg() + kwargs = func.kw_arg() + if args is None and kwargs is None: + return # Looks like this function does not have starred args + + args_defn_type = None + kwargs_defn_type = None + for arg_def, arg_kind in zip(defn.arguments, defn.arg_kinds): + if arg_kind == ARG_STAR: + args_defn_type = arg_def.type_annotation + elif arg_kind == ARG_STAR2: + kwargs_defn_type = arg_def.type_annotation + + # This may happen on invalid `ParamSpec` args / kwargs definition, + # type analyzer sets types of arguments to `Any`, but keeps + # definition types as `UnboundType` for now. + if not ( + (isinstance(args_defn_type, UnboundType) and args_defn_type.name.endswith(".args")) + or ( + isinstance(kwargs_defn_type, UnboundType) + and kwargs_defn_type.name.endswith(".kwargs") + ) + ): + # Looks like both `*args` and `**kwargs` are not `ParamSpec` + # It might be something else, skipping. 
+ return + + args_type = args.typ if args is not None else None + kwargs_type = kwargs.typ if kwargs is not None else None + + if ( + not isinstance(args_type, ParamSpecType) + or not isinstance(kwargs_type, ParamSpecType) + or args_type.name != kwargs_type.name + ): + if isinstance(args_defn_type, UnboundType) and args_defn_type.name.endswith(".args"): + param_name = args_defn_type.name.split(".")[0] + elif isinstance(kwargs_defn_type, UnboundType) and kwargs_defn_type.name.endswith( + ".kwargs" + ): + param_name = kwargs_defn_type.name.split(".")[0] + else: + # Fallback for cases that probably should not ever happen: + param_name = "P" + + self.fail( + f'ParamSpec must have "*args" typed as "{param_name}.args" and "**kwargs" typed as "{param_name}.kwargs"', + func, + code=codes.VALID_TYPE, + ) + def visit_decorator(self, dec: Decorator) -> None: self.statement = dec # TODO: better don't modify them at all. diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 6af596fc1feb..6f488f108153 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1166,3 +1166,116 @@ def func3(callback: Callable[P1, str]) -> Callable[P1, str]: return "foo" return inner [builtins fixtures/paramspec.pyi] + + +[case testInvalidParamSpecDefinitionsWithArgsKwargs] +from typing import Callable, ParamSpec + +P = ParamSpec('P') + +def c1(f: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> int: ... +def c2(f: Callable[P, int]) -> int: ... +def c3(f: Callable[P, int], *args, **kwargs) -> int: ... + +# It is ok to define, +def c4(f: Callable[P, int], *args: int, **kwargs: str) -> int: + # but not ok to call: + f(*args, **kwargs) # E: Argument 1 has incompatible type "*Tuple[int, ...]"; expected "P.args" \ + # E: Argument 2 has incompatible type "**Dict[str, str]"; expected "P.kwargs" + return 1 + +def f1(f: Callable[P, int], *args, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f2(f: Callable[P, int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f3(f: Callable[P, int], *args: P.args) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f4(f: Callable[P, int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" + +# Error message test: +P1 = ParamSpec('P1') + +def m1(f: Callable[P1, int], *a, **k: P1.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testInvalidParamSpecAndConcatenateDefinitionsWithArgsKwargs] +from typing import Callable, ParamSpec +from typing_extensions import Concatenate + +P = ParamSpec('P') + +def c1(f: Callable[Concatenate[int, P], int], *args: P.args, **kwargs: P.kwargs) -> int: ... +def c2(f: Callable[Concatenate[int, P], int]) -> int: ... +def c3(f: Callable[Concatenate[int, P], int], *args, **kwargs) -> int: ... 
+ +# It is ok to define, +def c4(f: Callable[Concatenate[int, P], int], *args: int, **kwargs: str) -> int: + # but not ok to call: + f(1, *args, **kwargs) # E: Argument 2 has incompatible type "*Tuple[int, ...]"; expected "P.args" \ + # E: Argument 3 has incompatible type "**Dict[str, str]"; expected "P.kwargs" + return 1 + +def f1(f: Callable[Concatenate[int, P], int], *args, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f2(f: Callable[Concatenate[int, P], int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f3(f: Callable[Concatenate[int, P], int], *args: P.args) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f4(f: Callable[Concatenate[int, P], int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testValidParamSpecInsideGenericWithoutArgsAndKwargs] +from typing import Callable, ParamSpec, Generic +from typing_extensions import Concatenate + +P = ParamSpec('P') + +class Some(Generic[P]): ... + +def create(s: Some[P], *args: int): ... +def update(s: Some[P], **kwargs: int): ... +def delete(s: Some[P]): ... + +def from_callable1(c: Callable[P, int], *args: int, **kwargs: int) -> Some[P]: ... +def from_callable2(c: Callable[P, int], **kwargs: int) -> Some[P]: ... +def from_callable3(c: Callable[P, int], *args: int) -> Some[P]: ... + +def from_extra1(c: Callable[Concatenate[int, P], int], *args: int, **kwargs: int) -> Some[P]: ... +def from_extra2(c: Callable[Concatenate[int, P], int], **kwargs: int) -> Some[P]: ... +def from_extra3(c: Callable[Concatenate[int, P], int], *args: int) -> Some[P]: ... +[builtins fixtures/paramspec.pyi] + + +[case testUnboundParamSpec] +from typing import Callable, ParamSpec + +P1 = ParamSpec('P1') +P2 = ParamSpec('P2') + +def f0(f: Callable[P1, int], *args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +def f1(*args: P1.args): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f2(**kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f3(*args: P1.args, **kwargs: int): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f4(*args: int, **kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +# Error message is based on the `args` definition: +def f5(*args: P2.args, **kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P2.args" and "**kwargs" typed as "P2.kwargs" +def f6(*args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +# Multiple `ParamSpec` variables can be found, they should not affect error message: +P3 = ParamSpec('P3') + +def f7(first: Callable[P3, int], *args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f8(first: Callable[P3, int], *args: P2.args, **kwargs: P1.kwargs): ... 
# E: ParamSpec must have "*args" typed as "P2.args" and "**kwargs" typed as "P2.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testArgsKwargsWithoutParamSpecVar] +from typing import Generic, Callable, ParamSpec + +P = ParamSpec('P') + +# This must be allowed: +class Some(Generic[P]): + def call(self, *args: P.args, **kwargs: P.kwargs): ... + +# TODO: this probably should be reported. +def call(*args: P.args, **kwargs: P.kwargs): ... +[builtins fixtures/paramspec.pyi] From 758f43c5d27eda339fd340a7b68bce59b3684254 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 31 Oct 2022 00:52:27 -0700 Subject: [PATCH 006/292] Add another invalid ParamSpec test case (#13968) Don't think there's a test case corresponding to the issue in #13966 --- test-data/unit/check-parameter-specification.test | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 6f488f108153..329985c4f75b 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1188,6 +1188,7 @@ def f1(f: Callable[P, int], *args, **kwargs: P.kwargs) -> int: ... # E: ParamSp def f2(f: Callable[P, int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" def f3(f: Callable[P, int], *args: P.args) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" def f4(f: Callable[P, int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f5(f: Callable[P, int], *args: P.args, extra_keyword_arg: int, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" # Error message test: P1 = ParamSpec('P1') @@ -1217,6 +1218,7 @@ def f1(f: Callable[Concatenate[int, P], int], *args, **kwargs: P.kwargs) -> int: def f2(f: Callable[Concatenate[int, P], int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" def f3(f: Callable[Concatenate[int, P], int], *args: P.args) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" def f4(f: Callable[Concatenate[int, P], int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f5(f: Callable[Concatenate[int, P], int], *args: P.args, extra_keyword_arg: int, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" [builtins fixtures/paramspec.pyi] From 7569d88d0d650be73070c2536ddc3746d61237dd Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Mon, 31 Oct 2022 07:24:42 -0700 Subject: [PATCH 007/292] Fix crash with malformed TypedDicts and disllow-any-expr (#13963) Fixes #13066 During the semanal phase, mypy opts to ignore and skip processing any malformed or illegal statements inside of a TypedDict class definition, such as method definitions. Skipping semanal analysis on these statements can cause any number of odd downstream problems: the type-checking phase assumes that all semanal-only semantic constructs (e.g. FakeInfo) have been purged by this point, and so can crash at any point once this precondition has been violated. 
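For example, a malformed definition along the lines of the new test case below (a decorated method inside a `TypedDict` body, checked with `--disallow-any-expr`) exercises exactly this path; the sketch mirrors that test rather than adding anything new:

```python
# Mirrors the added testCannotCreateTypedDictWithDecoratedFunction case:
# only "field_name: field_type" statements are legal in a TypedDict body,
# so the decorated method is reported instead of crashing later phases.
from typing import TypedDict

class D(TypedDict):
    @classmethod  # error: Invalid statement in TypedDict definition; expected "field_name: field_type"
    def m(cls) -> "D":
        ...
```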
This diff opts to solve this problem by filtering down the list of statements so we keep only the ones we know are legal within a TypedDict definition. The other possible solution to this problem is to modify mypy so we skip checking TypedDict class bodies entirely during type checking and fine-grained deps analysis. Doing this would also let address #10007 and supersede my other diff #13732. I decided against doing this for now because: 1. I wasn't sure if this was actually safe, especially in the fine-grained deps phase and for mypyc. 2. I think no matter what, the semanal phase should not leak semanal-only types: relaxing this postcondition would make it harder to reason about mypy. So, we'd probably want to make this change regardless of what we do in the later phases. --- mypy/semanal_typeddict.py | 32 +++++++++++++++++++++-------- test-data/unit/check-typeddict.test | 13 ++++++++++++ 2 files changed, 36 insertions(+), 9 deletions(-) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index fd6b1bbd2bbf..b864c2a30615 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -23,6 +23,7 @@ NameExpr, PassStmt, RefExpr, + Statement, StrExpr, TempNode, TupleExpr, @@ -93,7 +94,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N and defn.base_type_exprs[0].fullname in TPDICT_NAMES ): # Building a new TypedDict - fields, types, required_keys = self.analyze_typeddict_classdef_fields(defn) + fields, types, statements, required_keys = self.analyze_typeddict_classdef_fields(defn) if fields is None: return True, None # Defer info = self.build_typeddict_typeinfo( @@ -102,6 +103,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = statements return True, info # Extending/merging existing TypedDicts @@ -139,7 +141,12 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N # Iterate over bases in reverse order so that leftmost base class' keys take precedence for base in reversed(typeddict_bases): self.add_keys_and_types_from_base(base, keys, types, required_keys, defn) - new_keys, new_types, new_required_keys = self.analyze_typeddict_classdef_fields(defn, keys) + ( + new_keys, + new_types, + new_statements, + new_required_keys, + ) = self.analyze_typeddict_classdef_fields(defn, keys) if new_keys is None: return True, None # Defer keys.extend(new_keys) @@ -151,6 +158,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = new_statements return True, info def add_keys_and_types_from_base( @@ -250,7 +258,7 @@ def map_items_to_base( def analyze_typeddict_classdef_fields( self, defn: ClassDef, oldfields: list[str] | None = None - ) -> tuple[list[str] | None, list[Type], set[str]]: + ) -> tuple[list[str] | None, list[Type], list[Statement], set[str]]: """Analyze fields defined in a TypedDict class definition. This doesn't consider inherited fields (if any). 
Also consider totality, @@ -259,17 +267,22 @@ def analyze_typeddict_classdef_fields( Return tuple with these items: * List of keys (or None if found an incomplete reference --> deferral) * List of types for each key + * List of statements from defn.defs.body that are legally allowed to be a + part of a TypedDict definition * Set of required keys """ fields: list[str] = [] types: list[Type] = [] + statements: list[Statement] = [] for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): - # Still allow pass or ... (for empty TypedDict's). - if not isinstance(stmt, PassStmt) and not ( + # Still allow pass or ... (for empty TypedDict's) and docstrings + if isinstance(stmt, PassStmt) or ( isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, (EllipsisExpr, StrExpr)) ): + statements.append(stmt) + else: self.fail(TPDICT_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. @@ -281,8 +294,9 @@ def analyze_typeddict_classdef_fields( if name in fields: self.fail(f'Duplicate TypedDict key "{name}"', stmt) continue - # Append name and type in this case... + # Append stmt, name, and type in this case... fields.append(name) + statements.append(stmt) if stmt.type is None: types.append(AnyType(TypeOfAny.unannotated)) else: @@ -293,9 +307,9 @@ def analyze_typeddict_classdef_fields( and not self.api.is_func_scope(), ) if analyzed is None: - return None, [], set() # Need to defer + return None, [], [], set() # Need to defer types.append(analyzed) - # ...despite possible minor failures that allow further analyzis. + # ...despite possible minor failures that allow further analysis. if stmt.type is None or hasattr(stmt, "new_syntax") and not stmt.new_syntax: self.fail(TPDICT_CLASS_ERROR, stmt) elif not isinstance(stmt.rvalue, TempNode): @@ -317,7 +331,7 @@ def analyze_typeddict_classdef_fields( t.item if isinstance(t, RequiredType) else t for t in types ] - return fields, types, required_keys + return fields, types, statements, required_keys def check_typeddict( self, node: Expression, var_name: str | None, is_func_scope: bool diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 4c68b7b692ff..796f2f547528 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -221,6 +221,19 @@ reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {'y': builtins.in [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] +[case testCannotCreateTypedDictWithDecoratedFunction] +# flags: --disallow-any-expr +# https://github.com/python/mypy/issues/13066 +from typing import TypedDict +class D(TypedDict): + @classmethod # E: Invalid statement in TypedDict definition; expected "field_name: field_type" + def m(self) -> D: + pass +d = D() +reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {})" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + [case testTypedDictWithClassmethodAlternativeConstructorDoesNotCrash] # https://github.com/python/mypy/issues/5653 from typing import TypedDict From 8c691242d6326dc9e1c8521e18fd13eaf15a3b49 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 31 Oct 2022 09:52:09 -0700 Subject: [PATCH 008/292] Preserve (some) implicitly exported types (#13967) Fixes some of #13965, fixes #12749 We also need to modify attribute access logic (this is the TODO in the PR), which is a little trickier. 
But cases like #13933 convinced me it's worth making this change, even before I get around to figuring that out. --- mypy/semanal.py | 28 ++++++++++++++++++++-------- test-data/unit/check-flags.test | 13 +++++++++---- test-data/unit/check-modules.test | 2 +- 3 files changed, 30 insertions(+), 13 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index b8f708b22a92..3a2caab41d3a 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2303,10 +2303,20 @@ def visit_import_from(self, imp: ImportFrom) -> None: ) continue - if node and not node.module_hidden: + if node: self.process_imported_symbol( node, module_id, id, imported_id, fullname, module_public, context=imp ) + if node.module_hidden: + self.report_missing_module_attribute( + module_id, + id, + imported_id, + module_public=module_public, + module_hidden=not module_public, + context=imp, + add_unknown_imported_symbol=False, + ) elif module and not missing_submodule: # Target module exists but the imported name is missing or hidden. self.report_missing_module_attribute( @@ -2394,6 +2404,7 @@ def report_missing_module_attribute( module_public: bool, module_hidden: bool, context: Node, + add_unknown_imported_symbol: bool = True, ) -> None: # Missing attribute. if self.is_incomplete_namespace(import_id): @@ -2418,13 +2429,14 @@ def report_missing_module_attribute( suggestion = f"; maybe {pretty_seq(matches, 'or')}?" message += f"{suggestion}" self.fail(message, context, code=codes.ATTR_DEFINED) - self.add_unknown_imported_symbol( - imported_id, - context, - target_name=None, - module_public=module_public, - module_hidden=not module_public, - ) + if add_unknown_imported_symbol: + self.add_unknown_imported_symbol( + imported_id, + context, + target_name=None, + module_public=module_public, + module_hidden=not module_public, + ) if import_id == "typing": # The user probably has a missing definition in a test fixture. Let's verify. 
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 5a075dd6efef..33723b7fee76 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1606,14 +1606,19 @@ strict_equality = false [case testNoImplicitReexport] -# flags: --no-implicit-reexport -from other_module_2 import a +# flags: --no-implicit-reexport --show-error-codes +from other_module_2 import a # E: Module "other_module_2" does not explicitly export attribute "a" [attr-defined] +reveal_type(a) # N: Revealed type is "builtins.int" + +import other_module_2 +# TODO: this should also reveal builtins.int, see #13965 +reveal_type(other_module_2.a) # E: "object" does not explicitly export attribute "a" [attr-defined] \ + # N: Revealed type is "Any" + [file other_module_1.py] a = 5 [file other_module_2.py] from other_module_1 import a -[out] -main:2: error: Module "other_module_2" does not explicitly export attribute "a" [case testNoImplicitReexportRespectsAll] # flags: --no-implicit-reexport diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index a8eced3959e5..0b64daaf5abe 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1794,7 +1794,7 @@ from stub import C c = C() reveal_type(c.x) # N: Revealed type is "builtins.int" it: Iterable[int] -reveal_type(it) # N: Revealed type is "Any" +reveal_type(it) # N: Revealed type is "typing.Iterable[builtins.int]" [file stub.pyi] from typing import Iterable From 55d0adf17a15ddbd0a8e9fb9d27f848117522a17 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Tue, 1 Nov 2022 12:32:44 +0200 Subject: [PATCH 009/292] Update version on master for next release (#13974) Next release is 1.0 so also update the comments. NOTE: Should we go with 1.0 or 1.0.0? I chose 1.0.0 to make it clear that we can have minor revisions 3 numbers deep. i.e. x.y.z --- mypy/version.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mypy/version.py b/mypy/version.py index 837206834e38..b125385f9b43 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,10 +5,10 @@ from mypy import git # Base version. -# - Release versions have the form "0.NNN". -# - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). -# - For 1.0 we'll switch back to 1.2.3 form. -__version__ = "0.990+dev" +# - Release versions have the form "1.2.3". +# - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). +# - Before 1.0 we had the form "0.NNN". +__version__ = "1.0.0+dev" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From 32c26f208732097c0fcc3d4cf0fb2cd9fbd1a99b Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 1 Nov 2022 13:20:59 +0000 Subject: [PATCH 010/292] Typeshed branch 'master' was renamed to 'main' (#13980) There aren't really any big changes required over on mypy's side (I was worried there would be more breakage, but I think mypy should be fine). Cf. https://github.com/python/typeshed/issues/8956. 
--- misc/sync-typeshed.py | 2 +- mypy/checker.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 743a2934e0c3..878ffaa23bfb 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -134,7 +134,7 @@ def main() -> None: parser.add_argument( "--commit", default=None, - help="Typeshed commit (default to latest master if using a repository clone)", + help="Typeshed commit (default to latest main if using a repository clone)", ) parser.add_argument( "--typeshed-dir", diff --git a/mypy/checker.py b/mypy/checker.py index d66bf764b517..a23c03b2b09a 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4580,7 +4580,7 @@ def visit_with_stmt(self, s: WithStmt) -> None: # exceptions or not. We determine this using a heuristic based on the # return type of the __exit__ method -- see the discussion in # https://github.com/python/mypy/issues/7214 and the section about context managers - # in https://github.com/python/typeshed/blob/master/CONTRIBUTING.md#conventions + # in https://github.com/python/typeshed/blob/main/CONTRIBUTING.md#conventions # for more details. exit_ret_type = get_proper_type(exit_ret_type) From fed90caa47e61ccaa4b9aac44f2d7545bcf1a955 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 1 Nov 2022 18:32:47 +0000 Subject: [PATCH 011/292] ipc: Remove some typeshed-related TODOs (#13982) As per the TODO comments, this PR removes some unnecessary `assert`s in `mypy/ipc.py`. When these `assert`s were added in #6148, typeshed wasn't able to use `Literal` types yet. However, nowadays, typeshed has precise `Literal` overloads for `_winapi.WriteFile` and `_winapi.ConnectNamedType`, meaning mypy is able to precisely infer the return type of these function calls without the need for the `assert`s in `mypy/ipc.py`: https://github.com/python/typeshed/blob/main/stdlib/_winapi.pyi. 
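The stubs rely on the usual `Literal`-overload pattern; the following is a hypothetical stand-in (`write_file` is not the real `_winapi` signature) just to show how the return type gets pinned down per call site, which is what makes the asserts below redundant.

```python
# Hypothetical sketch of the Literal-overload pattern used by the real stubs.
from typing import Any, Literal, Tuple, overload

@overload
def write_file(handle: int, data: bytes, overlapped: Literal[True]) -> Tuple[Any, int]: ...
@overload
def write_file(handle: int, data: bytes, overlapped: Literal[False] = ...) -> Tuple[int, int]: ...
def write_file(handle: int, data: bytes, overlapped: bool = False) -> Tuple[Any, int]:
    raise NotImplementedError
```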
--- mypy/ipc.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/mypy/ipc.py b/mypy/ipc.py index d52769bdb2b1..f07616df0fd0 100644 --- a/mypy/ipc.py +++ b/mypy/ipc.py @@ -89,9 +89,6 @@ def write(self, data: bytes) -> None: if sys.platform == "win32": try: ov, err = _winapi.WriteFile(self.connection, data, overlapped=True) - # TODO: remove once typeshed supports Literal types - assert isinstance(ov, _winapi.Overlapped) - assert isinstance(err, int) try: if err == _winapi.ERROR_IO_PENDING: timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE @@ -217,8 +214,6 @@ def __enter__(self) -> IPCServer: # client never connects, though this can be "solved" by killing the server try: ov = _winapi.ConnectNamedPipe(self.connection, overlapped=True) - # TODO: remove once typeshed supports Literal types - assert isinstance(ov, _winapi.Overlapped) except OSError as e: # Don't raise if the client already exists, or the client already connected if e.winerror not in (_winapi.ERROR_PIPE_CONNECTED, _winapi.ERROR_NO_DATA): From 8b825472a02f0a30419c02e285ba931107a42959 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 1 Nov 2022 19:58:45 +0000 Subject: [PATCH 012/292] checker.py: Remove unneeded `cast()` (#13984) --- mypy/checker.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index a23c03b2b09a..71e2be3d7383 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -7,7 +7,6 @@ from contextlib import contextmanager, nullcontext from typing import ( AbstractSet, - Any, Callable, Dict, Generic, @@ -3453,8 +3452,7 @@ def check_multi_assignment_from_union( assert declared_type is not None clean_items.append((type, declared_type)) - # TODO: fix signature of zip() in typeshed. - types, declared_types = cast(Any, zip)(*clean_items) + types, declared_types = zip(*clean_items) self.binder.assign_type( expr, make_simplified_union(list(types)), From 0457d33609ef8dd7d7e32ff18dd39da3e4ecc3fc Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 2 Nov 2022 21:06:33 -0700 Subject: [PATCH 013/292] Make TryStar not crash (#13991) --- mypy/fastparse.py | 22 ++++++++++++++++++++++ mypy/test/helpers.py | 8 +++++++- mypy/test/testcheck.py | 2 ++ test-data/unit/check-python311.test | 6 ++++++ 4 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 test-data/unit/check-python311.test diff --git a/mypy/fastparse.py b/mypy/fastparse.py index a5c51c72934e..0d42ef53f456 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -212,6 +212,10 @@ def ast3_parse( MatchAs = Any MatchOr = Any AstNode = Union[ast3.expr, ast3.stmt, ast3.ExceptHandler] + if sys.version_info >= (3, 11): + TryStar = ast3.TryStar + else: + TryStar = Any except ImportError: try: from typed_ast import ast35 # type: ignore[attr-defined] # noqa: F401 @@ -1249,6 +1253,24 @@ def visit_Try(self, n: ast3.Try) -> TryStmt: ) return self.set_line(node, n) + def visit_TryStar(self, n: TryStar) -> TryStmt: + # TODO: we treat TryStar exactly like Try, which makes mypy not crash. See #12840 + vs = [ + self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers + ] + types = [self.visit(h.type) for h in n.handlers] + handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers] + + node = TryStmt( + self.as_required_block(n.body, n.lineno), + vs, + types, + handlers, + self.as_block(n.orelse, n.lineno), + self.as_block(n.finalbody, n.lineno), + ) + return self.set_line(node, n) + # Assert(expr test, expr? 
msg) def visit_Assert(self, n: ast3.Assert) -> AssertStmt: node = AssertStmt(self.visit(n.test), self.visit(n.msg)) diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 8bee8073bd16..145027404ff7 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -282,8 +282,14 @@ def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int: def testfile_pyversion(path: str) -> tuple[int, int]: - if path.endswith("python310.test"): + if path.endswith("python311.test"): + return 3, 11 + elif path.endswith("python310.test"): return 3, 10 + elif path.endswith("python39.test"): + return 3, 9 + elif path.endswith("python38.test"): + return 3, 8 else: return defaults.PYTHON3_VERSION diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 442e25b54ff2..4fe2ee6393c0 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -44,6 +44,8 @@ typecheck_files.remove("check-python39.test") if sys.version_info < (3, 10): typecheck_files.remove("check-python310.test") +if sys.version_info < (3, 11): + typecheck_files.remove("check-python311.test") # Special tests for platforms with case-insensitive filesystems. if sys.platform not in ("darwin", "win32"): diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test new file mode 100644 index 000000000000..b98bccc9059d --- /dev/null +++ b/test-data/unit/check-python311.test @@ -0,0 +1,6 @@ +[case testTryStarDoesNotCrash] +try: + pass +except* Exception as e: + reveal_type(e) # N: Revealed type is "builtins.Exception" +[builtins fixtures/exception.pyi] From a4da89e0543ee0d213b30216c9e15a5d00fcd578 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 2 Nov 2022 22:10:17 -0700 Subject: [PATCH 014/292] Better story for import redefinitions (#13969) This changes our importing logic to be more consistent and to treat import statements more like assignments. Fixes #13803, fixes #13914, fixes half of #12965, probably fixes #12574 The primary motivation for this is when typing modules as protocols, as in #13803. But it turns out we already allowed redefinition with "from" imports, so this also seems like a nice consistency win. We move shared logic from visit_import_all and visit_import_from (via process_imported_symbol) into add_imported_symbol. We then reuse it in visit_import. To simplify stuff, we inline the code from add_module_symbol into visit_import. Then we copy over logic from add_symbol, because MypyFile is not a SymbolTableNode, but this isn't the worst thing ever. Finally, we now need to check non-from import statements like assignments, which was a thing we weren't doing earlier. 
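Concretely, the pattern exercised by the new protocol tests below now works for plain `import` statements as well. A small sketch using the stdlib `random` module (the error wording follows the updated test expectations):

```python
from typing import Protocol

class RandomSource(Protocol):
    def random(self) -> float: ...

rng: RandomSource
import random as rng       # ok: the module structurally satisfies RandomSource
import collections as rng  # error: Incompatible import of "rng" (imported name has type Module, local name has type "RandomSource")
```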
--- mypy/checker.py | 4 +- mypy/semanal.py | 84 +++++++++++++-------------- test-data/unit/check-classes.test | 3 +- test-data/unit/check-incremental.test | 5 +- test-data/unit/check-modules.test | 19 ++++++ test-data/unit/check-protocols.test | 56 ++++++++++++++++++ test-data/unit/check-redefine.test | 2 +- 7 files changed, 119 insertions(+), 54 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 71e2be3d7383..8973ade98228 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2527,8 +2527,8 @@ def visit_import_from(self, node: ImportFrom) -> None: def visit_import_all(self, node: ImportAll) -> None: self.check_import(node) - def visit_import(self, s: Import) -> None: - pass + def visit_import(self, node: Import) -> None: + self.check_import(node) def check_import(self, node: ImportBase) -> None: for assign in node.assignments: diff --git a/mypy/semanal.py b/mypy/semanal.py index 3a2caab41d3a..77555648ba7e 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2235,13 +2235,33 @@ def visit_import(self, i: Import) -> None: base_id = id.split(".")[0] imported_id = base_id module_public = use_implicit_reexport - self.add_module_symbol( - base_id, - imported_id, - context=i, - module_public=module_public, - module_hidden=not module_public, - ) + + if base_id in self.modules: + node = self.modules[base_id] + if self.is_func_scope(): + kind = LDEF + elif self.type is not None: + kind = MDEF + else: + kind = GDEF + symbol = SymbolTableNode( + kind, node, module_public=module_public, module_hidden=not module_public + ) + self.add_imported_symbol( + imported_id, + symbol, + context=i, + module_public=module_public, + module_hidden=not module_public, + ) + else: + self.add_unknown_imported_symbol( + imported_id, + context=i, + target_name=base_id, + module_public=module_public, + module_hidden=not module_public, + ) def visit_import_from(self, imp: ImportFrom) -> None: self.statement = imp @@ -2377,19 +2397,6 @@ def process_imported_symbol( module_hidden=module_hidden, becomes_typeinfo=True, ) - existing_symbol = self.globals.get(imported_id) - if ( - existing_symbol - and not isinstance(existing_symbol.node, PlaceholderNode) - and not isinstance(node.node, PlaceholderNode) - ): - # Import can redefine a variable. They get special treatment. - if self.process_import_over_existing_name(imported_id, existing_symbol, node, context): - return - if existing_symbol and isinstance(node.node, PlaceholderNode): - # Imports are special, some redefinitions are allowed, so wait until - # we know what is the new symbol node. - return # NOTE: we take the original node even for final `Var`s. This is to support # a common pattern when constants are re-exported (same applies to import *). self.add_imported_symbol( @@ -2507,14 +2514,9 @@ def visit_import_all(self, i: ImportAll) -> None: if isinstance(node.node, MypyFile): # Star import of submodule from a package, add it as a dependency. self.imports.add(node.node.fullname) - existing_symbol = self.lookup_current_scope(name) - if existing_symbol and not isinstance(node.node, PlaceholderNode): - # Import can redefine a variable. They get special treatment. 
- if self.process_import_over_existing_name(name, existing_symbol, node, i): - continue # `from x import *` always reexports symbols self.add_imported_symbol( - name, node, i, module_public=True, module_hidden=False + name, node, context=i, module_public=True, module_hidden=False ) else: @@ -5589,24 +5591,6 @@ def add_local(self, node: Var | FuncDef | OverloadedFuncDef, context: Context) - node._fullname = name self.add_symbol(name, node, context) - def add_module_symbol( - self, id: str, as_id: str, context: Context, module_public: bool, module_hidden: bool - ) -> None: - """Add symbol that is a reference to a module object.""" - if id in self.modules: - node = self.modules[id] - self.add_symbol( - as_id, node, context, module_public=module_public, module_hidden=module_hidden - ) - else: - self.add_unknown_imported_symbol( - as_id, - context, - target_name=id, - module_public=module_public, - module_hidden=module_hidden, - ) - def _get_node_for_class_scoped_import( self, name: str, symbol_node: SymbolNode | None, context: Context ) -> SymbolNode | None: @@ -5653,13 +5637,23 @@ def add_imported_symbol( self, name: str, node: SymbolTableNode, - context: Context, + context: ImportBase, module_public: bool, module_hidden: bool, ) -> None: """Add an alias to an existing symbol through import.""" assert not module_hidden or not module_public + existing_symbol = self.lookup_current_scope(name) + if ( + existing_symbol + and not isinstance(existing_symbol.node, PlaceholderNode) + and not isinstance(node.node, PlaceholderNode) + ): + # Import can redefine a variable. They get special treatment. + if self.process_import_over_existing_name(name, existing_symbol, node, context): + return + symbol_node: SymbolNode | None = node.node if self.is_class_scope(): diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index b16387f194d4..4ed44c90f275 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -7414,8 +7414,7 @@ class Foo: def meth1(self, a: str) -> str: ... # E: Name "meth1" already defined on line 5 def meth2(self, a: str) -> str: ... 
- from mod1 import meth2 # E: Unsupported class scoped import \ - # E: Name "meth2" already defined on line 8 + from mod1 import meth2 # E: Incompatible import of "meth2" (imported name has type "Callable[[int], int]", local name has type "Callable[[Foo, str], str]") class Bar: from mod1 import foo # E: Unsupported class scoped import diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index d4e6779403b4..3ec0ed2c63f5 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -1025,10 +1025,7 @@ import a.b [file a/b.py] -[rechecked b] -[stale] -[out2] -tmp/b.py:4: error: Name "a" already defined on line 3 +[stale b] [case testIncrementalSilentImportsAndImportsInClass] # flags: --ignore-missing-imports diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 0b64daaf5abe..b3267f66653d 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -651,10 +651,29 @@ try: from m import f, g # E: Incompatible import of "g" (imported name has type "Callable[[Any, Any], Any]", local name has type "Callable[[Any], Any]") except: pass + +import m as f # E: Incompatible import of "f" (imported name has type "object", local name has type "Callable[[Any], Any]") + [file m.py] def f(x): pass def g(x, y): pass +[case testRedefineTypeViaImport] +from typing import Type +import mod + +X: Type[mod.A] +Y: Type[mod.B] +from mod import B as X +from mod import A as Y # E: Incompatible import of "Y" (imported name has type "Type[A]", local name has type "Type[B]") + +import mod as X # E: Incompatible import of "X" (imported name has type "object", local name has type "Type[A]") + +[file mod.py] +class A: ... +class B(A): ... + + [case testImportVariableAndAssignNone] try: from m import x diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 8cdfd2a3e0d9..113b2000fc22 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -3787,3 +3787,59 @@ from typing_extensions import Final a: Final = 1 [builtins fixtures/module.pyi] + + +[case testModuleAsProtocolRedefinitionTopLevel] +from typing import Protocol + +class P(Protocol): + def f(self) -> str: ... + +cond: bool +t: P +if cond: + import mod1 as t +else: + import mod2 as t + +import badmod as t # E: Incompatible import of "t" (imported name has type Module, local name has type "P") + +[file mod1.py] +def f() -> str: ... + +[file mod2.py] +def f() -> str: ... + +[file badmod.py] +def nothing() -> int: ... +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolRedefinitionImportFrom] +from typing import Protocol + +class P(Protocol): + def f(self) -> str: ... + +cond: bool +t: P +if cond: + from package import mod1 as t +else: + from package import mod2 as t + +from package import badmod as t # E: Incompatible import of "t" (imported name has type Module, local name has type "P") + +package: int = 10 + +import package.mod1 as t +import package.mod1 # E: Incompatible import of "package" (imported name has type Module, local name has type "int") + +[file package/mod1.py] +def f() -> str: ... + +[file package/mod2.py] +def f() -> str: ... + +[file package/badmod.py] +def nothing() -> int: ... 
+[builtins fixtures/module.pyi] diff --git a/test-data/unit/check-redefine.test b/test-data/unit/check-redefine.test index e73f715c9ec0..e3f1b976d4e9 100644 --- a/test-data/unit/check-redefine.test +++ b/test-data/unit/check-redefine.test @@ -285,7 +285,7 @@ def f() -> None: import typing as m m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module) n = 1 - import typing as n # E: Name "n" already defined on line 5 + import typing as n # E: Incompatible import of "n" (imported name has type Module, local name has type "int") [builtins fixtures/module.pyi] [case testRedefineLocalWithTypeAnnotation] From 77a92b598bbfa129613d16e17b5e00ad7af9d1ab Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 3 Nov 2022 05:53:00 -0700 Subject: [PATCH 015/292] [mypyc] allow use of enum.Enum (#13995) Fixes https://github.com/mypyc/mypyc/issues/896 --- mypyc/irbuild/classdef.py | 6 +++++- mypyc/test-data/run-classes.test | 10 ++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 2c412253ec71..905747fed554 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -482,7 +482,11 @@ def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value: name = "_NamedTuple" base = builder.get_module_attr("typing", name, cdef.line) else: - base = builder.load_global_str(cls.name, cdef.line) + cls_module = cls.fullname.rsplit(".", 1)[0] + if cls_module == builder.current_module: + base = builder.load_global_str(cls.name, cdef.line) + else: + base = builder.load_module_attr_by_fullname(cls.fullname, cdef.line) bases.append(base) if cls.fullname in MAGIC_TYPED_DICT_CLASSES: # The remaining base classes are synthesized by mypy and should be ignored. diff --git a/mypyc/test-data/run-classes.test b/mypyc/test-data/run-classes.test index d505bda2d705..177bae0cc895 100644 --- a/mypyc/test-data/run-classes.test +++ b/mypyc/test-data/run-classes.test @@ -263,6 +263,16 @@ class TestEnum(Enum): assert TestEnum.test() == 3 +import enum + +class Pokemon(enum.Enum): + magikarp = 1 + squirtle = 2 + slowbro = 3 + +assert Pokemon.magikarp.value == 1 +assert Pokemon.squirtle.name == 'squirtle' + [file other.py] # Force a multi-module test to make sure we can compile multi-file with # non-extension classes From 451ae0e4600af848b02390495e4317b997d51032 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 3 Nov 2022 05:54:25 -0700 Subject: [PATCH 016/292] [mypyc] fix for typeshed changes to Collection (#13994) --- mypyc/irbuild/classdef.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 905747fed554..34fc1fd766b0 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -451,6 +451,7 @@ def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value: "typing.Collection", "typing.Reversible", "typing.Container", + "typing.Sized", ): # HAX: Synthesized base classes added by mypy don't exist at runtime, so skip them. # This could break if they were added explicitly, though... 
From bc78eb56eb93f75be749c02860ccecbf66022a6f Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Thu, 3 Nov 2022 05:55:16 -0700 Subject: [PATCH 017/292] Fix Mypyc not compiling on Arch Linux (#13978) Closes https://github.com/mypyc/mypyc/issues/956 This was originally a part of #13953, see https://github.com/python/mypy/pull/13953#discussion_r1007593391 --- mypyc/build.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mypyc/build.py b/mypyc/build.py index 4f40a6cd0865..a9aa16f5dfee 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -534,6 +534,10 @@ def mypycify( "-Wno-unused-command-line-argument", "-Wno-unknown-warning-option", "-Wno-unused-but-set-variable", + "-Wno-ignored-optimization-argument", + # Disables C Preprocessor (cpp) warnings + # See https://github.com/mypyc/mypyc/issues/956 + "-Wno-cpp", ] elif compiler.compiler_type == "msvc": # msvc doesn't have levels, '/O2' is full and '/Od' is disable From 11a040d418b95d1956e90b734ac3e832e119090d Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Thu, 3 Nov 2022 16:34:59 -0700 Subject: [PATCH 018/292] Add `__match_args__` to node types (#13330) Closes #13243. For the most part, in order to determine which fields where worth pattern matching against, I looked at what was being displayed in the `StrConv` class (defined in `mypy/strconv.py`). I might've missed some fields, but for the most part all of the important ones are there. In addition, to make pattern matching feel more natural, I re-arranged the fields to better align with how they look in the source code. For example, when matching against `OpExpr`, you would do: ```python match o: case OpExpr(IntExpr(1), "+", IntExpr(2)): pass ``` instead of: ```python match o: case OpExpr("+", IntExpr(1), IntExpr(2)): pass ``` --- mypy/nodes.py | 136 ++++++++++++++++++++++++++++++++++++++ mypyc/irbuild/classdef.py | 2 +- mypyc/irbuild/prepare.py | 6 +- 3 files changed, 142 insertions(+), 2 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 7334d9114346..9221ec48aa61 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -298,6 +298,8 @@ class MypyFile(SymbolNode): "future_import_flags", ) + __match_args__ = ("name", "path", "defs") + # Fully qualified module name _fullname: Bogus[str] # Path to the file (empty string if not known) @@ -433,6 +435,8 @@ class Import(ImportBase): __slots__ = ("ids",) + __match_args__ = ("ids",) + ids: list[tuple[str, str | None]] # (module id, as id) def __init__(self, ids: list[tuple[str, str | None]]) -> None: @@ -448,6 +452,8 @@ class ImportFrom(ImportBase): __slots__ = ("id", "names", "relative") + __match_args__ = ("id", "names", "relative") + id: str relative: int names: list[tuple[str, str | None]] # Tuples (name, as name) @@ -467,6 +473,8 @@ class ImportAll(ImportBase): __slots__ = ("id", "relative", "imported_names") + __match_args__ = ("id", "relative") + id: str relative: int # NOTE: Only filled and used by old semantic analyzer. 
@@ -652,6 +660,8 @@ class Argument(Node): __slots__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + __match_args__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + def __init__( self, variable: Var, @@ -790,6 +800,8 @@ class FuncDef(FuncItem, SymbolNode, Statement): "is_mypy_only", ) + __match_args__ = ("name", "arguments", "type", "body") + # Note that all __init__ args must have default values def __init__( self, @@ -880,6 +892,8 @@ class Decorator(SymbolNode, Statement): __slots__ = ("func", "decorators", "original_decorators", "var", "is_overload") + __match_args__ = ("decorators", "var", "func") + func: FuncDef # Decorated function decorators: list[Expression] # Decorators (may be empty) # Some decorators are removed by semanal, keep the original here. @@ -991,6 +1005,8 @@ class Var(SymbolNode): "invalid_partial_type", ) + __match_args__ = ("name", "type", "final_value") + def __init__(self, name: str, type: mypy.types.Type | None = None) -> None: super().__init__() self._name = name # Name without module prefix @@ -1099,6 +1115,8 @@ class ClassDef(Statement): "deco_line", ) + __match_args__ = ("name", "defs") + name: str # Name of the class without module prefix fullname: Bogus[str] # Fully qualified name of the class defs: Block @@ -1176,6 +1194,8 @@ class GlobalDecl(Statement): __slots__ = ("names",) + __match_args__ = ("names",) + names: list[str] def __init__(self, names: list[str]) -> None: @@ -1191,6 +1211,8 @@ class NonlocalDecl(Statement): __slots__ = ("names",) + __match_args__ = ("names",) + names: list[str] def __init__(self, names: list[str]) -> None: @@ -1204,6 +1226,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class Block(Statement): __slots__ = ("body", "is_unreachable") + __match_args__ = ("body", "is_unreachable") + def __init__(self, body: list[Statement]) -> None: super().__init__() self.body = body @@ -1226,6 +1250,8 @@ class ExpressionStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -1258,6 +1284,8 @@ class AssignmentStmt(Statement): "invalid_recursive_alias", ) + __match_args__ = ("lvalues", "rvalues", "type") + lvalues: list[Lvalue] # This is a TempNode if and only if no rvalue (x: t). rvalue: Expression @@ -1306,6 +1334,8 @@ class OperatorAssignmentStmt(Statement): __slots__ = ("op", "lvalue", "rvalue") + __match_args__ = ("lvalue", "op", "rvalue") + op: str # TODO: Enum? 
lvalue: Lvalue rvalue: Expression @@ -1323,6 +1353,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WhileStmt(Statement): __slots__ = ("expr", "body", "else_body") + __match_args__ = ("expr", "body", "else_body") + expr: Expression body: Block else_body: Block | None @@ -1350,6 +1382,8 @@ class ForStmt(Statement): "is_async", ) + __match_args__ = ("index", "index_type", "expr", "body", "else_body") + # Index variables index: Lvalue # Type given by type comments for index, can be None @@ -1392,6 +1426,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ReturnStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression | None def __init__(self, expr: Expression | None) -> None: @@ -1405,6 +1441,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class AssertStmt(Statement): __slots__ = ("expr", "msg") + __match_args__ = ("expr", "msg") + expr: Expression msg: Expression | None @@ -1420,6 +1458,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class DelStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Lvalue def __init__(self, expr: Lvalue) -> None: @@ -1454,6 +1494,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class IfStmt(Statement): __slots__ = ("expr", "body", "else_body") + __match_args__ = ("expr", "body", "else_body") + expr: list[Expression] body: list[Block] else_body: Block | None @@ -1471,6 +1513,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class RaiseStmt(Statement): __slots__ = ("expr", "from_expr") + __match_args__ = ("expr", "from_expr") + # Plain 'raise' is a valid statement. expr: Expression | None from_expr: Expression | None @@ -1487,6 +1531,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class TryStmt(Statement): __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + body: Block # Try body # Plain 'except:' also possible types: list[Expression | None] # Except type expressions @@ -1519,6 +1565,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WithStmt(Statement): __slots__ = ("expr", "target", "unanalyzed_type", "analyzed_types", "body", "is_async") + __match_args__ = ("expr", "target", "body") + expr: list[Expression] target: list[Lvalue | None] # Type given by type comments for target, can be None @@ -1548,6 +1596,10 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class MatchStmt(Statement): + __slots__ = ("subject", "patterns", "guards", "bodies") + + __match_args__ = ("subject", "patterns", "guards", "bodies") + subject: Expression patterns: list[Pattern] guards: list[Expression | None] @@ -1579,6 +1631,8 @@ class IntExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: int # 0 by default def __init__(self, value: int) -> None: @@ -1600,6 +1654,8 @@ class StrExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: str # '' by default def __init__(self, value: str) -> None: @@ -1615,6 +1671,8 @@ class BytesExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + # Note: we deliberately do NOT use bytes here because it ends up # unnecessarily complicating a lot of the result logic. 
For example, # we'd have to worry about converting the bytes into a format we can @@ -1639,6 +1697,8 @@ class FloatExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: float # 0.0 by default def __init__(self, value: float) -> None: @@ -1654,6 +1714,8 @@ class ComplexExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: complex def __init__(self, value: complex) -> None: @@ -1678,6 +1740,8 @@ class StarExpr(Expression): __slots__ = ("expr", "valid") + __match_args__ = ("expr", "valid") + expr: Expression valid: bool @@ -1734,6 +1798,8 @@ class NameExpr(RefExpr): __slots__ = ("name", "is_special_form") + __match_args__ = ("name", "node") + def __init__(self, name: str) -> None: super().__init__() self.name = name # Name referred to (may be qualified) @@ -1752,6 +1818,8 @@ class MemberExpr(RefExpr): __slots__ = ("expr", "name", "def_var") + __match_args__ = ("expr", "name", "node") + def __init__(self, expr: Expression, name: str) -> None: super().__init__() self.expr = expr @@ -1813,6 +1881,8 @@ class CallExpr(Expression): __slots__ = ("callee", "args", "arg_kinds", "arg_names", "analyzed") + __match_args__ = ("callee", "args", "arg_kinds", "arg_names") + def __init__( self, callee: Expression, @@ -1841,6 +1911,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldFromExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -1854,6 +1926,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression | None def __init__(self, expr: Expression | None) -> None: @@ -1872,6 +1946,8 @@ class IndexExpr(Expression): __slots__ = ("base", "index", "method_type", "analyzed") + __match_args__ = ("base", "index") + base: Expression index: Expression # Inferred __getitem__ method type @@ -1896,6 +1972,8 @@ class UnaryExpr(Expression): __slots__ = ("op", "expr", "method_type") + __match_args__ = ("op", "expr") + op: str # TODO: Enum? expr: Expression # Inferred operator method type @@ -1916,6 +1994,8 @@ class AssignmentExpr(Expression): __slots__ = ("target", "value") + __match_args__ = ("target", "value") + def __init__(self, target: Expression, value: Expression) -> None: super().__init__() self.target = target @@ -1931,6 +2011,8 @@ class OpExpr(Expression): __slots__ = ("op", "left", "right", "method_type", "right_always", "right_unreachable") + __match_args__ = ("left", "op", "right") + op: str # TODO: Enum? left: Expression right: Expression @@ -1959,6 +2041,8 @@ class ComparisonExpr(Expression): __slots__ = ("operators", "operands", "method_types") + __match_args__ = ("operands", "operators") + operators: list[str] operands: list[Expression] # Inferred type for the operator methods (when relevant; None for 'is'). 
@@ -1989,6 +2073,8 @@ class SliceExpr(Expression): __slots__ = ("begin_index", "end_index", "stride") + __match_args__ = ("begin_index", "end_index", "stride") + begin_index: Expression | None end_index: Expression | None stride: Expression | None @@ -2013,6 +2099,8 @@ class CastExpr(Expression): __slots__ = ("expr", "type") + __match_args__ = ("expr", "type") + expr: Expression type: mypy.types.Type @@ -2030,6 +2118,8 @@ class AssertTypeExpr(Expression): __slots__ = ("expr", "type") + __match_args__ = ("expr", "type") + expr: Expression type: mypy.types.Type @@ -2047,6 +2137,8 @@ class RevealExpr(Expression): __slots__ = ("expr", "kind", "local_nodes") + __match_args__ = ("expr", "kind", "local_nodes") + expr: Expression | None kind: int local_nodes: list[Var] | None @@ -2068,6 +2160,8 @@ class SuperExpr(Expression): __slots__ = ("name", "info", "call") + __match_args__ = ("name", "call", "info") + name: str info: TypeInfo | None # Type that contains this super expression call: CallExpr # The expression super(...) @@ -2085,6 +2179,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class LambdaExpr(FuncItem, Expression): """Lambda expression""" + __match_args__ = ("arguments", "arg_names", "arg_kinds", "body") + @property def name(self) -> str: return "" @@ -2108,6 +2204,8 @@ class ListExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2123,6 +2221,8 @@ class DictExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[tuple[Expression | None, Expression]] def __init__(self, items: list[tuple[Expression | None, Expression]]) -> None: @@ -2140,6 +2240,8 @@ class TupleExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2155,6 +2257,8 @@ class SetExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2170,6 +2274,8 @@ class GeneratorExpr(Expression): __slots__ = ("left_expr", "sequences", "condlists", "is_async", "indices") + __match_args__ = ("left_expr", "indices", "sequences", "condlists") + left_expr: Expression sequences: list[Expression] condlists: list[list[Expression]] @@ -2200,6 +2306,8 @@ class ListComprehension(Expression): __slots__ = ("generator",) + __match_args__ = ("generator",) + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: @@ -2215,6 +2323,8 @@ class SetComprehension(Expression): __slots__ = ("generator",) + __match_args__ = ("generator",) + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: @@ -2230,6 +2340,8 @@ class DictionaryComprehension(Expression): __slots__ = ("key", "value", "sequences", "condlists", "is_async", "indices") + __match_args__ = ("key", "value", "indices", "sequences", "condlists") + key: Expression value: Expression sequences: list[Expression] @@ -2263,6 +2375,8 @@ class ConditionalExpr(Expression): __slots__ = ("cond", "if_expr", "else_expr") + __match_args__ = ("if_expr", "cond", "else_expr") + cond: Expression if_expr: Expression else_expr: Expression @@ -2282,6 +2396,8 @@ class TypeApplication(Expression): __slots__ = ("expr", "types") + __match_args__ = ("expr", "types") + expr: Expression types: list[mypy.types.Type] @@ -2359,6 +2475,8 @@ class TypeVarExpr(TypeVarLikeExpr): __slots__ = ("values",) + __match_args__ = ("name", "values", "upper_bound") + 
# Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. values: list[mypy.types.Type] @@ -2402,6 +2520,8 @@ def deserialize(cls, data: JsonDict) -> TypeVarExpr: class ParamSpecExpr(TypeVarLikeExpr): __slots__ = () + __match_args__ = ("name", "upper_bound") + def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_paramspec_expr(self) @@ -2430,6 +2550,8 @@ class TypeVarTupleExpr(TypeVarLikeExpr): __slots__ = () + __match_args__ = ("name", "upper_bound") + def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_type_var_tuple_expr(self) @@ -2458,6 +2580,8 @@ class TypeAliasExpr(Expression): __slots__ = ("type", "tvars", "no_args", "node") + __match_args__ = ("type", "tvars", "no_args", "node") + # The target type. type: mypy.types.Type # Names of unbound type variables used to define the alias @@ -2486,6 +2610,8 @@ class NamedTupleExpr(Expression): __slots__ = ("info", "is_typed") + __match_args__ = ("info",) + # The class representation of this named tuple (its tuple_type attribute contains # the tuple item types) info: TypeInfo @@ -2505,6 +2631,8 @@ class TypedDictExpr(Expression): __slots__ = ("info",) + __match_args__ = ("info",) + # The class representation of this typed dict info: TypeInfo @@ -2521,6 +2649,8 @@ class EnumCallExpr(Expression): __slots__ = ("info", "items", "values") + __match_args__ = ("info", "items", "values") + # The class representation of this enumerated type info: TypeInfo # The item names (for debugging) @@ -2557,6 +2687,8 @@ class NewTypeExpr(Expression): __slots__ = ("name", "old_type", "info") + __match_args__ = ("name", "old_type", "info") + name: str # The base type (the second argument to NewType) old_type: mypy.types.Type | None @@ -2580,6 +2712,8 @@ class AwaitExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -3283,6 +3417,8 @@ def f(x: B[T]) -> T: ... 
# without T, Any would be used here "eager", ) + __match_args__ = ("name", "target", "alias_tvars", "no_args") + def __init__( self, target: mypy.types.Type, diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 34fc1fd766b0..4502c201a2e8 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -629,7 +629,7 @@ def find_attr_initializers( and not isinstance(stmt.rvalue, TempNode) ): name = stmt.lvalues[0].name - if name == "__slots__": + if name in ("__slots__", "__match_args__"): continue if name == "__deletable__": diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index 82162d1d0d0e..dc153ea11561 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -226,7 +226,11 @@ def prepare_class_def( if isinstance(node.node, Var): assert node.node.type, "Class member %s missing type" % name - if not node.node.is_classvar and name not in ("__slots__", "__deletable__"): + if not node.node.is_classvar and name not in ( + "__slots__", + "__deletable__", + "__match_args__", + ): ir.attributes[name] = mapper.type_to_rtype(node.node.type) elif isinstance(node.node, (FuncDef, Decorator)): prepare_method_def(ir, module_name, cdef, mapper, node.node) From 428b1723285bcb44b74d4986105d631d41c25e97 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 4 Nov 2022 00:26:07 -0700 Subject: [PATCH 019/292] Clarify docs surrounding install-types (#14003) See #13681 More clearly point out the speed cost and the alternative of installing stub packages directly --- docs/source/running_mypy.rst | 46 ++++++++++++------------------------ 1 file changed, 15 insertions(+), 31 deletions(-) diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index a7eb3fc5e1e7..264cec59749e 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -322,34 +322,27 @@ the library, you will get a message like this: main.py:1: note: Hint: "python3 -m pip install types-PyYAML" main.py:1: note: (or run "mypy --install-types" to install all missing stub packages) -You can resolve the issue by running the suggested pip command or -commands. Alternatively, you can use :option:`--install-types ` to install all known missing stubs: +You can resolve the issue by running the suggested pip commands. +If you're running mypy in CI, you can ensure the presence of any stub packages +you need the same as you would any other test dependency, e.g. by adding them to +the appropriate ``requirements.txt`` file. -.. code-block:: text - - mypy --install-types - -This installs any stub packages that were suggested in the previous -mypy run. You can also use your normal mypy command line with the -extra :option:`--install-types ` option to -install missing stubs at the end of the run (if any were found). - -Use :option:`--install-types ` with -:option:`--non-interactive ` to install all suggested -stub packages without asking for confirmation, *and* type check your -code, in a single command: +Alternatively, add the :option:`--install-types ` +to your mypy command to install all known missing stubs: .. code-block:: text - mypy --install-types --non-interactive src/ + mypy --install-types -This can be useful in Continuous Integration jobs if you'd prefer not -to manage stub packages manually. 
This is somewhat slower than -explicitly installing stubs before running mypy, since it may type -check your code twice -- the first time to find the missing stubs, and +This is slower than explicitly installing stubs, since if effectively +runs mypy twice -- the first time to find the missing stubs, and the second time to type check your code properly after mypy has -installed the stubs. +installed the stubs. It also can make controlling stub versions harder, +resulting in less reproducible type checking. + +By default, :option:`--install-types ` shows a confirmation prompt. +Use :option:`--non-interactive ` to install all suggested +stub packages without asking for confirmation *and* type check your code: If you've already installed the relevant third-party libraries in an environment other than the one mypy is running in, you can use :option:`--python-executable @@ -394,15 +387,6 @@ this error, try: you must run ``mypy ~/foo-project/src`` (or set the ``MYPYPATH`` to ``~/foo-project/src``. -In some rare cases, you may get the "Cannot find implementation or library -stub for module" error even when the module is installed in your system. -This can happen when the module is both missing type hints and is installed -on your system in an unconventional way. - -In this case, follow the steps above on how to handle -:ref:`missing type hints in third party libraries `. - - .. _finding-imports: How imports are found From 796068d06a0ab171d22f7be555c28dfad57db433 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 4 Nov 2022 11:04:52 +0000 Subject: [PATCH 020/292] running_mypy.rst: Fix typo (#14004) Introduced in #14003 --- docs/source/running_mypy.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index 264cec59749e..3deaf26023fc 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -334,7 +334,7 @@ to your mypy command to install all known missing stubs: mypy --install-types -This is slower than explicitly installing stubs, since if effectively +This is slower than explicitly installing stubs, since it effectively runs mypy twice -- the first time to find the missing stubs, and the second time to type check your code properly after mypy has installed the stubs. It also can make controlling stub versions harder, From 331b170c5d0e55a11a78a7f60fad5a8a8b5d6f2c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 4 Nov 2022 16:03:33 +0000 Subject: [PATCH 021/292] Fix crash on nested unions in recursive types (#14007) Fixes #14000 This will introduce some minor perf penalty, but only for code that actually uses recursive types. 
--- mypy/typeops.py | 4 ++-- test-data/unit/check-recursive-types.test | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/mypy/typeops.py b/mypy/typeops.py index 7eb1a67b46ea..5b29dc71991b 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -71,13 +71,13 @@ def is_recursive_pair(s: Type, t: Type) -> bool: """ if isinstance(s, TypeAliasType) and s.is_recursive: return ( - isinstance(get_proper_type(t), Instance) + isinstance(get_proper_type(t), (Instance, UnionType)) or isinstance(t, TypeAliasType) and t.is_recursive ) if isinstance(t, TypeAliasType) and t.is_recursive: return ( - isinstance(get_proper_type(s), Instance) + isinstance(get_proper_type(s), (Instance, UnionType)) or isinstance(s, TypeAliasType) and s.is_recursive ) diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index a0875c60362c..0d727b109658 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -808,3 +808,21 @@ def test2() -> Tree2: def test3() -> Tree3: return 42 # E: Incompatible return value type (got "int", expected "Union[str, Tuple[Tree3, Tree3, Tree3]]") [builtins fixtures/tuple.pyi] + +[case testRecursiveDoubleUnionNoCrash] +from typing import Tuple, Union, Callable, Sequence + +K = Union[int, Tuple[Union[int, K]]] +L = Union[int, Callable[[], Union[int, L]]] +M = Union[int, Sequence[Union[int, M]]] + +x: K +x = x +y: L +y = y +z: M +z = z + +x = y # E: Incompatible types in assignment (expression has type "L", variable has type "K") +z = x # OK +[builtins fixtures/tuple.pyi] From 807da2675bfdc7ec434f8d5677c13722a555e8da Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 5 Nov 2022 18:51:59 -0700 Subject: [PATCH 022/292] Fix and optimise overload compatibility checking (#14018) Discovered as part of #14017 --- mypy/subtypes.py | 54 ++++++++++++--------------- test-data/unit/check-classes.test | 59 +++++++++++++++++++++++------- test-data/unit/check-selftype.test | 28 ++++++++++++++ 3 files changed, 96 insertions(+), 45 deletions(-) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 38fae16e7011..2724379ab878 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -823,9 +823,8 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Ensure each overload in the right side (the supertype) is accounted for. previous_match_left_index = -1 matched_overloads = set() - possible_invalid_overloads = set() - for right_index, right_item in enumerate(right.items): + for right_item in right.items: found_match = False for left_index, left_item in enumerate(left.items): @@ -834,43 +833,36 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Order matters: we need to make sure that the index of # this item is at least the index of the previous one. if subtype_match and previous_match_left_index <= left_index: - if not found_match: - # Update the index of the previous match. - previous_match_left_index = left_index - found_match = True - matched_overloads.add(left_item) - possible_invalid_overloads.discard(left_item) + previous_match_left_index = left_index + found_match = True + matched_overloads.add(left_index) + break else: # If this one overlaps with the supertype in any way, but it wasn't # an exact match, then it's a potential error. 
strict_concat = self.options.strict_concatenate if self.options else True - if is_callable_compatible( - left_item, - right_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, - ) or is_callable_compatible( - right_item, - left_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, + if left_index not in matched_overloads and ( + is_callable_compatible( + left_item, + right_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) + or is_callable_compatible( + right_item, + left_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) ): - # If this is an overload that's already been matched, there's no - # problem. - if left_item not in matched_overloads: - possible_invalid_overloads.add(left_item) + return False if not found_match: return False - - if possible_invalid_overloads: - # There were potentially invalid overloads that were never matched to the - # supertype. - return False return True elif isinstance(right, UnboundType): return True diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 4ed44c90f275..42aaa68b5873 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -3872,28 +3872,59 @@ class Super: def foo(self, a: C) -> C: pass class Sub(Super): - @overload # Fail + @overload def foo(self, a: A) -> A: pass @overload def foo(self, a: B) -> C: pass # Fail @overload def foo(self, a: C) -> C: pass + +class Sub2(Super): + @overload + def foo(self, a: B) -> C: pass # Fail + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass + +class Sub3(Super): + @overload + def foo(self, a: A) -> int: pass + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass [builtins fixtures/classmethod.pyi] [out] -tmp/foo.pyi:16: error: Signature of "foo" incompatible with supertype "Super" -tmp/foo.pyi:16: note: Superclass: -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C -tmp/foo.pyi:16: note: Subclass: -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: B) -> C -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C tmp/foo.pyi:19: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader +tmp/foo.pyi:24: error: Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:24: note: Superclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:24: note: Subclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: B) -> C +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:25: error: Overloaded function signatures 1 and 2 overlap with incompatible return types +tmp/foo.pyi:32: error: 
Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:32: note: Superclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:32: note: Subclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> int +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:35: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testTypeTypeOverlapsWithObjectAndType] from foo import * diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 506e8bfe8ab1..bfb0eb5a4d89 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -128,6 +128,34 @@ reveal_type(cast(A, C()).copy()) # N: Revealed type is "__main__.A" [builtins fixtures/bool.pyi] +[case testSelfTypeOverrideCompatibility] +from typing import overload, TypeVar, Generic + +T = TypeVar("T") + +class A(Generic[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... + +class B(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... + +class B2(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + @overload + def f(self: A[bytes]) -> bytes: ... + def f(self): ... + [case testSelfTypeSuper] from typing import TypeVar, cast From e8de6d1fc5c908e738f69494de38ea191fb12e60 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 7 Nov 2022 10:34:35 +0000 Subject: [PATCH 023/292] Add support for exception groups and except* (#14020) Ref #12840 It looks like from the point of view of type checking support is quite easy. Mypyc support however requires some actual work, so I don't include it in this PR. 
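For example (a condensed version of the new test cases, not part of the diff below), the variable bound by `except*` is now inferred as an exception group wrapping the handled types:

```python
from typing import reveal_type  # available on Python 3.11+, which except* requires

class Custom(Exception): ...

try:
    ...
except* (RuntimeError, Custom) as e:
    reveal_type(e)  # ExceptionGroup[Union[RuntimeError, Custom]]
except* BaseException as e:
    reveal_type(e)  # BaseExceptionGroup[BaseException]
```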
--- mypy/checker.py | 37 +++++++++++++++---- mypy/fastparse.py | 2 +- mypy/message_registry.py | 3 ++ mypy/nodes.py | 7 ++-- mypy/strconv.py | 2 ++ mypy/treetransform.py | 4 ++- mypyc/irbuild/statement.py | 2 ++ test-data/unit/check-python311.test | 51 +++++++++++++++++++++++++-- test-data/unit/fixtures/exception.pyi | 11 ++++-- 9 files changed, 104 insertions(+), 15 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 8973ade98228..5744a4ef4937 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4305,7 +4305,7 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: with self.binder.frame_context(can_skip=True, fall_through=4): typ = s.types[i] if typ: - t = self.check_except_handler_test(typ) + t = self.check_except_handler_test(typ, s.is_star) var = s.vars[i] if var: # To support local variables, we make this a definition line, @@ -4325,7 +4325,7 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: if s.else_body: self.accept(s.else_body) - def check_except_handler_test(self, n: Expression) -> Type: + def check_except_handler_test(self, n: Expression, is_star: bool) -> Type: """Type check an exception handler test clause.""" typ = self.expr_checker.accept(n) @@ -4341,22 +4341,47 @@ def check_except_handler_test(self, n: Expression) -> Type: item = ttype.items[0] if not item.is_type_obj(): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) - exc_type = item.ret_type + return self.default_exception_type(is_star) + exc_type = erase_typevars(item.ret_type) elif isinstance(ttype, TypeType): exc_type = ttype.item else: self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) if not is_subtype(exc_type, self.named_type("builtins.BaseException")): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) all_types.append(exc_type) + if is_star: + new_all_types: list[Type] = [] + for typ in all_types: + if is_proper_subtype(typ, self.named_type("builtins.BaseExceptionGroup")): + self.fail(message_registry.INVALID_EXCEPTION_GROUP, n) + new_all_types.append(AnyType(TypeOfAny.from_error)) + else: + new_all_types.append(typ) + return self.wrap_exception_group(new_all_types) return make_simplified_union(all_types) + def default_exception_type(self, is_star: bool) -> Type: + """Exception type to return in case of a previous type error.""" + any_type = AnyType(TypeOfAny.from_error) + if is_star: + return self.named_generic_type("builtins.ExceptionGroup", [any_type]) + return any_type + + def wrap_exception_group(self, types: Sequence[Type]) -> Type: + """Transform except* variable type into an appropriate exception group.""" + arg = make_simplified_union(types) + if is_subtype(arg, self.named_type("builtins.Exception")): + base = "builtins.ExceptionGroup" + else: + base = "builtins.BaseExceptionGroup" + return self.named_generic_type(base, [arg]) + def get_types_from_except_handler(self, typ: Type, n: Expression) -> list[Type]: """Helper for check_except_handler_test to retrieve handler types.""" typ = get_proper_type(typ) diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 0d42ef53f456..209ebb89f36b 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1254,7 +1254,6 @@ def visit_Try(self, n: ast3.Try) -> TryStmt: return self.set_line(node, n) def visit_TryStar(self, n: TryStar) -> TryStmt: - # TODO: we treat TryStar exactly like 
Try, which makes mypy not crash. See #12840 vs = [ self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers ] @@ -1269,6 +1268,7 @@ def visit_TryStar(self, n: TryStar) -> TryStmt: self.as_block(n.orelse, n.lineno), self.as_block(n.finalbody, n.lineno), ) + node.is_star = True return self.set_line(node, n) # Assert(expr test, expr? msg) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index c84ce120dbda..18acb2cd7a71 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -44,6 +44,9 @@ def with_additional_msg(self, info: str) -> ErrorMessage: NO_RETURN_EXPECTED: Final = ErrorMessage("Return statement in function which does not return") INVALID_EXCEPTION: Final = ErrorMessage("Exception must be derived from BaseException") INVALID_EXCEPTION_TYPE: Final = ErrorMessage("Exception type must be derived from BaseException") +INVALID_EXCEPTION_GROUP: Final = ErrorMessage( + "Exception type in except* cannot derive from BaseExceptionGroup" +) RETURN_IN_ASYNC_GENERATOR: Final = ErrorMessage( '"return" with value in async generator is not allowed' ) diff --git a/mypy/nodes.py b/mypy/nodes.py index 9221ec48aa61..0ea89611dc1a 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1529,9 +1529,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class TryStmt(Statement): - __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") - __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") body: Block # Try body # Plain 'except:' also possible @@ -1540,6 +1540,8 @@ class TryStmt(Statement): handlers: list[Block] # Except bodies else_body: Block | None finally_body: Block | None + # Whether this is try ... 
except* (added in Python 3.11) + is_star: bool def __init__( self, @@ -1557,6 +1559,7 @@ def __init__( self.handlers = handlers self.else_body = else_body self.finally_body = finally_body + self.is_star = False def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_try_stmt(self) diff --git a/mypy/strconv.py b/mypy/strconv.py index 1acf7699316c..9b369618b88e 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -276,6 +276,8 @@ def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> str: def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> str: a: list[Any] = [o.body] + if o.is_star: + a.append("*") for i in range(len(o.vars)): a.append(o.types[i]) diff --git a/mypy/treetransform.py b/mypy/treetransform.py index d7f159d02a22..c863db6b3dd5 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -373,7 +373,7 @@ def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) def visit_try_stmt(self, node: TryStmt) -> TryStmt: - return TryStmt( + new = TryStmt( self.block(node.body), self.optional_names(node.vars), self.optional_expressions(node.types), @@ -381,6 +381,8 @@ def visit_try_stmt(self, node: TryStmt) -> TryStmt: self.optional_block(node.else_body), self.optional_block(node.finally_body), ) + new.is_star = node.is_star + return new def visit_with_stmt(self, node: WithStmt) -> WithStmt: new = WithStmt( diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 371a305e67b9..a1d36c011aa1 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -616,6 +616,8 @@ def transform_try_stmt(builder: IRBuilder, t: TryStmt) -> None: # constructs that we compile separately. When we have a # try/except/else/finally, we treat the try/except/else as the # body of a try/finally block. + if t.is_star: + builder.error("Exception groups and except* cannot be compiled yet", t.line) if t.finally_body: def transform_try_body() -> None: diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test index b98bccc9059d..9bf62b0c489d 100644 --- a/test-data/unit/check-python311.test +++ b/test-data/unit/check-python311.test @@ -1,6 +1,53 @@ -[case testTryStarDoesNotCrash] +[case testTryStarSimple] try: pass except* Exception as e: - reveal_type(e) # N: Revealed type is "builtins.Exception" + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +[builtins fixtures/exception.pyi] + +[case testTryStarMultiple] +try: + pass +except* Exception as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +except* RuntimeError as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.RuntimeError]" +[builtins fixtures/exception.pyi] + +[case testTryStarBase] +try: + pass +except* BaseException as e: + reveal_type(e) # N: Revealed type is "builtins.BaseExceptionGroup[builtins.BaseException]" +[builtins fixtures/exception.pyi] + +[case testTryStarTuple] +class Custom(Exception): ... + +try: + pass +except* (RuntimeError, Custom) as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, __main__.Custom]]" +[builtins fixtures/exception.pyi] + +[case testTryStarInvalidType] +class Bad: ... 
+try: + pass +except* (RuntimeError, Bad) as e: # E: Exception type must be derived from BaseException + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalid] +try: + pass +except* ExceptionGroup as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalidTuple] +try: + pass +except* (RuntimeError, ExceptionGroup) as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, Any]]" [builtins fixtures/exception.pyi] diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi index bf6d21c8716e..1c88723e7191 100644 --- a/test-data/unit/fixtures/exception.pyi +++ b/test-data/unit/fixtures/exception.pyi @@ -1,3 +1,4 @@ +import sys from typing import Generic, TypeVar T = TypeVar('T') @@ -5,7 +6,8 @@ class object: def __init__(self): pass class type: pass -class tuple(Generic[T]): pass +class tuple(Generic[T]): + def __ge__(self, other: object) -> bool: ... class function: pass class int: pass class str: pass @@ -13,11 +15,14 @@ class unicode: pass class bool: pass class ellipsis: pass -# Note: this is a slight simplification. In Python 2, the inheritance hierarchy -# is actually Exception -> StandardError -> RuntimeError -> ... class BaseException: def __init__(self, *args: object) -> None: ... class Exception(BaseException): pass class RuntimeError(Exception): pass class NotImplementedError(RuntimeError): pass +if sys.version_info >= (3, 11): + _BT_co = TypeVar("_BT_co", bound=BaseException, covariant=True) + _T_co = TypeVar("_T_co", bound=Exception, covariant=True) + class BaseExceptionGroup(BaseException, Generic[_BT_co]): ... + class ExceptionGroup(BaseExceptionGroup[_T_co], Exception): ... From d2a3e667bf0bedb19a1e88baaf7f3ffebc12c74a Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Mon, 7 Nov 2022 05:28:37 -0800 Subject: [PATCH 024/292] Fix crash with PartialTypes and the enum plugin (#14021) Fixes #12109. The original issue reported that the bug had to do with the use of the `--follow-imports=skip` flag. However, it turned out this was a red herring after closer inspection: I was able to trigger a more minimal repro both with and without this flag: ```python from enum import Enum class Foo(Enum): a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") b = None def check(self) -> None: reveal_type(Foo.a.value) # N: Revealed type is "" reveal_type(Foo.b.value) # N: Revealed type is "" ``` The first two `reveal_types` demonstrate the crux of the bug: the enum plugin does not correctly handle and convert partial types into regular types when inferring the type of the `.value` field. This can then cause any number of downstream problems. For example, suppose we modify `def check(...)` so it runs `reveal_type(self.value)`. Doing this will trigger a crash in mypy because it makes the enum plugin eventually try running `is_equivalent(...)` on the two partial types. But `is_equivalent` does not support partial types, so we crash. I opted to solve this problem by: 1. Making the enum plugin explicitly call the `fixup_partial_types` function on all field types. This prevents the code from crashing. 2. Modifies mypy so that Final vars are never marked as being PartialTypes. 
Without this, `reveal_type(Foo.b.value)` would report a type of `Union[Any, None]` instead of just `None`. (Note that all enum fields are implicitly final). --- mypy/checker.py | 50 ++++++++++++++++------------------ mypy/checkexpr.py | 3 +- mypy/plugins/enums.py | 3 +- mypy/typeops.py | 15 ++++++++++ test-data/unit/check-enum.test | 27 ++++++++++++++++++ 5 files changed, 70 insertions(+), 28 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 5744a4ef4937..3ebc829daa0e 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -159,6 +159,7 @@ erase_to_bound, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, get_type_vars, is_literal_type_like, @@ -2738,8 +2739,8 @@ def check_assignment( # None initializers preserve the partial None type. return - if is_valid_inferred_type(rvalue_type): - var = lvalue_type.var + var = lvalue_type.var + if is_valid_inferred_type(rvalue_type, is_lvalue_final=var.is_final): partial_types = self.find_partial_types(var) if partial_types is not None: if not self.current_node_deferred: @@ -3687,7 +3688,10 @@ def infer_variable_type( """Infer the type of initialized variables from initializer type.""" if isinstance(init_type, DeletedType): self.msg.deleted_as_rvalue(init_type, context) - elif not is_valid_inferred_type(init_type) and not self.no_partial_types: + elif ( + not is_valid_inferred_type(init_type, is_lvalue_final=name.is_final) + and not self.no_partial_types + ): # We cannot use the type of the initialization expression for full type # inference (it's not specific enough), but we might be able to give # partial type which will be made more specific later. A partial type @@ -6114,7 +6118,7 @@ def enter_partial_types( self.msg.need_annotation_for_var(var, context, self.options.python_version) self.partial_reported.add(var) if var.type: - fixed = self.fixup_partial_type(var.type) + fixed = fixup_partial_type(var.type) var.invalid_partial_type = fixed != var.type var.type = fixed @@ -6145,20 +6149,7 @@ def handle_partial_var_type( else: # Defer the node -- we might get a better type in the outer scope self.handle_cannot_determine_type(node.name, context) - return self.fixup_partial_type(typ) - - def fixup_partial_type(self, typ: Type) -> Type: - """Convert a partial type that we couldn't resolve into something concrete. - - This means, for None we make it Optional[Any], and for anything else we - fill in all of the type arguments with Any. - """ - if not isinstance(typ, PartialType): - return typ - if typ.type is None: - return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) - else: - return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) + return fixup_partial_type(typ) def is_defined_in_base_class(self, var: Var) -> bool: if var.info: @@ -7006,20 +6997,27 @@ def infer_operator_assignment_method(typ: Type, operator: str) -> tuple[bool, st return False, method -def is_valid_inferred_type(typ: Type) -> bool: - """Is an inferred type valid? +def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: + """Is an inferred type valid and needs no further refinement? - Examples of invalid types include the None type or List[]. + Examples of invalid types include the None type (when we are not assigning + None to a final lvalue) or List[]. When not doing strict Optional checking, all types containing None are invalid. When doing strict Optional checking, only None and types that are incompletely defined (i.e. contain UninhabitedType) are invalid. 
""" - if isinstance(get_proper_type(typ), (NoneType, UninhabitedType)): - # With strict Optional checking, we *may* eventually infer NoneType when - # the initializer is None, but we only do that if we can't infer a - # specific Optional type. This resolution happens in - # leave_partial_types when we pop a partial types scope. + proper_type = get_proper_type(typ) + if isinstance(proper_type, NoneType): + # If the lvalue is final, we may immediately infer NoneType when the + # initializer is None. + # + # If not, we want to defer making this decision. The final inferred + # type could either be NoneType or an Optional type, depending on + # the context. This resolution happens in leave_partial_types when + # we pop a partial types scope. + return is_lvalue_final + elif isinstance(proper_type, UninhabitedType): return False return not typ.accept(NothingSeeker()) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index ac16f9c9c813..0c392ae755d7 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -111,6 +111,7 @@ custom_special_method, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, is_literal_type_like, make_simplified_union, @@ -2925,7 +2926,7 @@ def find_partial_type_ref_fast_path(self, expr: Expression) -> Type | None: if isinstance(expr.node, Var): result = self.analyze_var_ref(expr.node, expr) if isinstance(result, PartialType) and result.type is not None: - self.chk.store_type(expr, self.chk.fixup_partial_type(result)) + self.chk.store_type(expr, fixup_partial_type(result)) return result return None diff --git a/mypy/plugins/enums.py b/mypy/plugins/enums.py index 75b301252f06..1acf42d11ee6 100644 --- a/mypy/plugins/enums.py +++ b/mypy/plugins/enums.py @@ -19,7 +19,7 @@ from mypy.nodes import TypeInfo from mypy.semanal_enum import ENUM_BASES from mypy.subtypes import is_equivalent -from mypy.typeops import make_simplified_union +from mypy.typeops import fixup_partial_type, make_simplified_union from mypy.types import CallableType, Instance, LiteralType, ProperType, Type, get_proper_type ENUM_NAME_ACCESS: Final = {f"{prefix}.name" for prefix in ENUM_BASES} | { @@ -77,6 +77,7 @@ def _infer_value_type_with_auto_fallback( """ if proper_type is None: return None + proper_type = get_proper_type(fixup_partial_type(proper_type)) if not (isinstance(proper_type, Instance) and proper_type.type.fullname == "enum.auto"): return proper_type assert isinstance(ctx.type, Instance), "An incorrect ctx.type was passed." diff --git a/mypy/typeops.py b/mypy/typeops.py index 5b29dc71991b..9f224e02c088 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -41,6 +41,7 @@ Overloaded, Parameters, ParamSpecType, + PartialType, ProperType, TupleType, Type, @@ -1016,3 +1017,17 @@ def try_getting_instance_fallback(typ: Type) -> Instance | None: elif isinstance(typ, TypeVarType): return try_getting_instance_fallback(typ.upper_bound) return None + + +def fixup_partial_type(typ: Type) -> Type: + """Convert a partial type that we couldn't resolve into something concrete. + + This means, for None we make it Optional[Any], and for anything else we + fill in all of the type arguments with Any. 
+ """ + if not isinstance(typ, PartialType): + return typ + if typ.type is None: + return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) + else: + return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 039ddd1621cd..db8643455099 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -2100,3 +2100,30 @@ class Some: class A(Some, Enum): __labels__ = {1: "1"} [builtins fixtures/dict.pyi] + +[case testEnumWithPartialTypes] +from enum import Enum + +class Mixed(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = None + + def check(self) -> None: + reveal_type(Mixed.a.value) # N: Revealed type is "builtins.list[Any]" + reveal_type(Mixed.b.value) # N: Revealed type is "None" + + # Inferring Any here instead of a union seems to be a deliberate + # choice; see the testEnumValueInhomogenous case above. + reveal_type(self.value) # N: Revealed type is "Any" + + for field in Mixed: + reveal_type(field.value) # N: Revealed type is "Any" + if field.value is None: + pass + +class AllPartialList(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = [] # E: Need type annotation for "b" (hint: "b: List[] = ...") + + def check(self) -> None: + reveal_type(self.value) # N: Revealed type is "builtins.list[Any]" From 39d35cdee1bd02a6fb071334273e1c1fb0893066 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 7 Nov 2022 15:38:41 +0000 Subject: [PATCH 025/292] Fix new style union syntax in type aliases (#14008) Fix Python 3.10 `|` union syntax in type aliases, when one of the operands is a type alias or a type with an overloaded `__init__`. We can now infer `typing._SpecialForm` for type aliases in a runtime context. Also create a bunch of minimal test-only stubs for stdlib modules to fix some test failures caused by the missing `typing._SpecialForm` in the default test stubs. This is generally what we want in any case, since using typeshed stubs with minimal builtins/typing stubs can result in unpredictable behavior and slow tests. Fixes #12368. Fixes #12005. Fixes #11426. 
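For reference, a condensed version of the new test case (not part of the diff below); both alias definitions were previously rejected by mypy and are now accepted on Python 3.10+:

```python
from typing import overload

class C:
    @overload
    def __init__(self) -> None: ...
    @overload
    def __init__(self, x: int) -> None: ...
    def __init__(self, x: int = 0) -> None: ...

class D: ...

# Left operand is None and the right operand is a class, including one
# with an overloaded __init__.
X = None | C
Y = None | D
```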
--- mypy/checker.py | 8 ++-- mypy/checkexpr.py | 7 ++- mypy/messages.py | 4 ++ mypyc/test-data/run-async.test | 3 ++ test-data/unit/check-ctypes.test | 13 ++++++ test-data/unit/check-dataclasses.test | 3 +- test-data/unit/check-generics.test | 8 ++-- test-data/unit/check-literal.test | 5 +- test-data/unit/check-python310.test | 16 +++++++ test-data/unit/check-type-aliases.test | 26 ++++++----- test-data/unit/fixtures/args.pyi | 1 + test-data/unit/fixtures/type.pyi | 1 + test-data/unit/fixtures/typing-full.pyi | 2 + test-data/unit/fixtures/typing-medium.pyi | 2 + test-data/unit/lib-stub/_decimal.pyi | 4 ++ test-data/unit/lib-stub/datetime.pyi | 16 +++++++ test-data/unit/lib-stub/decimal.pyi | 3 ++ test-data/unit/lib-stub/functools.pyi | 35 ++++++++++++++ test-data/unit/lib-stub/traceback.pyi | 3 ++ test-data/unit/lib-stub/unannotated_lib.pyi | 1 + test-data/unit/pythoneval.test | 52 +++++++++++++++++++++ 21 files changed, 186 insertions(+), 27 deletions(-) create mode 100644 test-data/unit/lib-stub/_decimal.pyi create mode 100644 test-data/unit/lib-stub/datetime.pyi create mode 100644 test-data/unit/lib-stub/decimal.pyi create mode 100644 test-data/unit/lib-stub/functools.pyi create mode 100644 test-data/unit/lib-stub/traceback.pyi create mode 100644 test-data/unit/lib-stub/unannotated_lib.pyi diff --git a/mypy/checker.py b/mypy/checker.py index 3ebc829daa0e..67d132afe2c7 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -6048,11 +6048,11 @@ def lookup_qualified(self, name: str) -> SymbolTableNode: last = parts[-1] if last in n.names: return n.names[last] - elif len(parts) == 2 and parts[0] == "builtins": - fullname = "builtins." + last + elif len(parts) == 2 and parts[0] in ("builtins", "typing"): + fullname = ".".join(parts) if fullname in SUGGESTED_TEST_FIXTURES: - suggestion = ", e.g. add '[builtins fixtures/{}]' to your test".format( - SUGGESTED_TEST_FIXTURES[fullname] + suggestion = ", e.g. add '[{} fixtures/{}]' to your test".format( + parts[0], SUGGESTED_TEST_FIXTURES[fullname] ) else: suggestion = "" diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 0c392ae755d7..a271fb876bf3 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3870,9 +3870,8 @@ class LongName(Generic[T]): ... else: if alias_definition: return AnyType(TypeOfAny.special_form) - # This type is invalid in most runtime contexts, give it an 'object' type. - # TODO: Use typing._SpecialForm instead? - return self.named_type("builtins.object") + # The _SpecialForm type can be used in some runtime contexts (e.g. it may have __or__). 
+ return self.named_type("typing._SpecialForm") def apply_type_arguments_to_callable( self, tp: Type, args: Sequence[Type], ctx: Context @@ -4742,7 +4741,7 @@ def has_member(self, typ: Type, member: str) -> bool: typ = typ.fallback if isinstance(typ, Instance): return typ.type.has_readable_member(member) - if isinstance(typ, CallableType) and typ.is_type_obj(): + if isinstance(typ, FunctionLike) and typ.is_type_obj(): return typ.fallback.type.has_readable_member(member) elif isinstance(typ, AnyType): return True diff --git a/mypy/messages.py b/mypy/messages.py index 4e762faa0b32..e11ee9d0f7f2 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -132,6 +132,7 @@ "builtins.isinstance": "isinstancelist.pyi", "builtins.property": "property.pyi", "builtins.classmethod": "classmethod.pyi", + "typing._SpecialForm": "typing-medium.pyi", } @@ -2253,6 +2254,9 @@ def format_literal_value(typ: LiteralType) -> str: if itype.extra_attrs and itype.extra_attrs.mod_name and module_names: return f"{base_str} {itype.extra_attrs.mod_name}" return base_str + if itype.type.fullname == "typing._SpecialForm": + # This is not a real type but used for some typing-related constructs. + return "" if verbosity >= 2 or (fullnames and itype.type.fullname in fullnames): base_str = itype.type.fullname else: diff --git a/mypyc/test-data/run-async.test b/mypyc/test-data/run-async.test index e664ed3bb55a..85ad172d61df 100644 --- a/mypyc/test-data/run-async.test +++ b/mypyc/test-data/run-async.test @@ -13,6 +13,9 @@ async def g() -> int: async def f() -> int: return await g() +[file asyncio/__init__.pyi] +async def sleep(t: float) -> None: ... + [typing fixtures/typing-full.pyi] [file driver.py] diff --git a/test-data/unit/check-ctypes.test b/test-data/unit/check-ctypes.test index 5a350256f8e9..beb1afd779c0 100644 --- a/test-data/unit/check-ctypes.test +++ b/test-data/unit/check-ctypes.test @@ -20,6 +20,7 @@ a[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches ar for x in a: reveal_type(x) # N: Revealed type is "builtins.int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayCustomElementType] import ctypes @@ -52,6 +53,7 @@ myu: Union[ctypes.Array[ctypes.c_int], List[str]] for myi in myu: reveal_type(myi) # N: Revealed type is "Union[builtins.int, builtins.str]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayUnionElementType] import ctypes @@ -76,6 +78,7 @@ mya[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches for myx in mya: reveal_type(myx) # N: Revealed type is "Union[__main__.MyCInt, builtins.int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharArrayAttrs] import ctypes @@ -84,6 +87,7 @@ ca = (ctypes.c_char * 4)(b'a', b'b', b'c', b'\x00') reveal_type(ca.value) # N: Revealed type is "builtins.bytes" reveal_type(ca.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharPArrayDoesNotCrash] import ctypes @@ -91,6 +95,7 @@ import ctypes # The following line used to crash with "Could not find builtin symbol 'NoneType'" ca = (ctypes.c_char_p * 0)() [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesWcharArrayAttrs] import ctypes @@ -99,6 +104,7 @@ wca = (ctypes.c_wchar * 4)('a', 'b', 'c', '\x00') reveal_type(wca.value) # N: Revealed type is "builtins.str" wca.raw # E: Array attribute "raw" is only available with element type "c_char", 
not "c_wchar" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharUnionArrayAttrs] import ctypes @@ -108,6 +114,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_wchar]] reveal_type(cua.value) # N: Revealed type is "Union[builtins.bytes, builtins.str]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_wchar]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyUnionArrayAttrs] import ctypes @@ -117,6 +124,7 @@ caa: ctypes.Array[Union[ctypes.c_char, Any]] reveal_type(caa.value) # N: Revealed type is "Union[builtins.bytes, Any]" reveal_type(caa.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherUnionArrayAttrs] import ctypes @@ -126,6 +134,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_int]] cua.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "Union[c_char, c_int]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyArrayAttrs] import ctypes @@ -134,6 +143,7 @@ aa: ctypes.Array[Any] reveal_type(aa.value) # N: Revealed type is "Any" reveal_type(aa.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherArrayAttrs] import ctypes @@ -142,6 +152,7 @@ oa = (ctypes.c_int * 4)(1, 2, 3, 4) oa.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "c_int" oa.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayConstructorStarargs] import ctypes @@ -154,6 +165,7 @@ reveal_type(intarr4(*int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_ reveal_type(intarr4(*c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" +[typing fixtures/typing-medium.pyi] float_values = [1.0, 2.0, 3.0, 4.0] intarr4(*float_values) # E: Array constructor argument 1 of type "List[float]" is not convertible to the array element type "Iterable[c_int]" @@ -167,3 +179,4 @@ x = {"a": 1, "b": 2} intarr4(**x) [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 3ec4c60e6929..d4064124109b 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -633,8 +633,9 @@ class Two: c = Two() x = c.S -reveal_type(x) # N: Revealed type is "builtins.object" +reveal_type(x) # N: Revealed type is "typing._SpecialForm" [builtins fixtures/dataclasses.pyi] +[typing fixtures/typing-medium.pyi] [case testDataclassOrdering] # flags: --python-version 3.7 diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index b7d98a783a49..7df52b60fc0b 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -1049,20 +1049,20 @@ CA = Callable[[T], int] TA = Tuple[T, int] UA = Union[T, int] -cs = CA + 1 # E: Unsupported left operand type for + ("object") +cs = CA + 1 # E: 
Unsupported left operand type for + ("") reveal_type(cs) # N: Revealed type is "Any" -ts = TA() # E: "object" not callable +ts = TA() # E: "" not callable reveal_type(ts) # N: Revealed type is "Any" -us = UA.x # E: "object" has no attribute "x" +us = UA.x # E: "" has no attribute "x" reveal_type(us) # N: Revealed type is "Any" xx = CA[str] + 1 # E: Type application is only supported for generic classes yy = TA[str]() # E: Type application is only supported for generic classes zz = UA[str].x # E: Type application is only supported for generic classes [builtins fixtures/tuple.pyi] - +[typing fixtures/typing-medium.pyi] [out] [case testGenericTypeAliasesTypeVarBinding] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index da8f1570a4f4..ef8c9095e58a 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -1484,16 +1484,17 @@ Alias = Literal[3] isinstance(3, Literal[3]) # E: Cannot use isinstance() with Literal type isinstance(3, Alias) # E: Cannot use isinstance() with Literal type \ - # E: Argument 2 to "isinstance" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "isinstance" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" isinstance(3, Renamed[3]) # E: Cannot use isinstance() with Literal type isinstance(3, indirect.Literal[3]) # E: Cannot use isinstance() with Literal type issubclass(int, Literal[3]) # E: Cannot use issubclass() with Literal type issubclass(int, Alias) # E: Cannot use issubclass() with Literal type \ - # E: Argument 2 to "issubclass" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "issubclass" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" issubclass(int, Renamed[3]) # E: Cannot use issubclass() with Literal type issubclass(int, indirect.Literal[3]) # E: Cannot use issubclass() with Literal type [builtins fixtures/isinstancelist.pyi] +[typing fixtures/typing-medium.pyi] [out] [case testLiteralErrorsWhenSubclassed] diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 1548d5dadcfd..3b90a910e943 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1788,3 +1788,19 @@ def f6(a: object) -> None: case _ if y is not None: # E: Name "y" may be undefined pass [builtins fixtures/tuple.pyi] + +[case testTypeAliasWithNewUnionSyntaxAndNoneLeftOperand] +from typing import overload +class C: + @overload + def __init__(self) -> None: pass + @overload + def __init__(self, x: int) -> None: pass + def __init__(self, x=0): + pass + +class D: pass + +X = None | C +Y = None | D +[builtins fixtures/type.pyi] diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 8dafc8f47a6c..fab372976ab2 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -821,28 +821,28 @@ c = Child() reveal_type(NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(Parent.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(Parent.NormalExplicit) # N: Revealed type 
is "def () -> __main__.Foo" -reveal_type(Parent.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(Parent.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(Parent.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Parent.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(Child.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(Child.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(Child.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(Child.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(Child.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Child.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(p.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(p.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(p.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(p.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(p.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(c.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(c.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(c.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(c.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(c.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" # Use type aliases in a type alias context in a plausible way @@ -895,6 +895,7 @@ reveal_type(weird_child_2) # N: Revealed type is "def () -> Any" reveal_type(weird_child_3) # N: Revealed type is "def () -> Any" reveal_type(weird_child_4) # N: Revealed type is "def () -> Any" [builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] [case testMalformedTypeAliasRuntimeReassignments] from typing import Union @@ -927,8 +928,8 @@ SpecialExplicit = 4 # E: Cannot assign multiple types to name "SpecialExplicit" Parent.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") Parent.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") -Parent.SpecialImplicit = 4 -Parent.SpecialExplicit = 4 +Parent.SpecialImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") +Parent.SpecialExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") Child.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") Child.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") @@ -945,3 +946,4 @@ c.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type c.SpecialImplicit = 4 c.SpecialExplicit = 4 [builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi index 8d0ecc00f4b6..9985ccf84817 100644 --- a/test-data/unit/fixtures/args.pyi +++ b/test-data/unit/fixtures/args.pyi @@ -26,6 +26,7 @@ class list(Sequence[T], Generic[T]): pass class int: def __eq__(self, o: object) -> bool: pass +class float: 
pass class str: pass class bytes: pass class bool: pass diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi index 755b45ff0bb5..77feb41ba70b 100644 --- a/test-data/unit/fixtures/type.pyi +++ b/test-data/unit/fixtures/type.pyi @@ -13,6 +13,7 @@ class list(Generic[T]): pass class type(Generic[T]): __name__: str def __or__(self, other: Union[type, None]) -> type: pass + def __ror__(self, other: Union[type, None]) -> type: pass def mro(self) -> List['type']: pass class tuple(Generic[T]): pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index c406da986818..04568f7c03f3 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -179,3 +179,5 @@ class _TypedDict(Mapping[str, object]): def pop(self, k: NoReturn, default: T = ...) -> object: ... def update(self: T, __m: T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... + +class _SpecialForm: pass diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 568fe057c4cf..863b0703989d 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -68,4 +68,6 @@ class ContextManager(Generic[T]): # Use Any because not all the precise types are in the fixtures. def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass +class _SpecialForm: pass + TYPE_CHECKING = 1 diff --git a/test-data/unit/lib-stub/_decimal.pyi b/test-data/unit/lib-stub/_decimal.pyi new file mode 100644 index 000000000000..2c2c5bff11f7 --- /dev/null +++ b/test-data/unit/lib-stub/_decimal.pyi @@ -0,0 +1,4 @@ +# Very simplified decimal stubs for use in tests + +class Decimal: + def __new__(cls, value: str = ...) -> Decimal: ... diff --git a/test-data/unit/lib-stub/datetime.pyi b/test-data/unit/lib-stub/datetime.pyi new file mode 100644 index 000000000000..7d71682d051d --- /dev/null +++ b/test-data/unit/lib-stub/datetime.pyi @@ -0,0 +1,16 @@ +# Very simplified datetime stubs for use in tests + +class datetime: + def __new__( + cls, + year: int, + month: int, + day: int, + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + *, + fold: int = ..., + ) -> datetime: ... + def __format__(self, __fmt: str) -> str: ... diff --git a/test-data/unit/lib-stub/decimal.pyi b/test-data/unit/lib-stub/decimal.pyi new file mode 100644 index 000000000000..d2ab6eda9ff1 --- /dev/null +++ b/test-data/unit/lib-stub/decimal.pyi @@ -0,0 +1,3 @@ +# Very simplified decimal stubs for use in tests + +from _decimal import * diff --git a/test-data/unit/lib-stub/functools.pyi b/test-data/unit/lib-stub/functools.pyi new file mode 100644 index 000000000000..9e62a14c2f34 --- /dev/null +++ b/test-data/unit/lib-stub/functools.pyi @@ -0,0 +1,35 @@ +from typing import Generic, TypeVar, Callable, Any, Mapping + +_T = TypeVar("_T") + +class _SingleDispatchCallable(Generic[_T]): + registry: Mapping[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + # @fun.register(complex) + # def _(arg, verbose=False): ... + @overload + def register(self, cls: type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + # @fun.register + # def _(arg: int, verbose=False): + @overload + def register(self, cls: Callable[..., _T], func: None = ...) -> Callable[..., _T]: ... + # fun.register(int, lambda x: x) + @overload + def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... 
+ def _clear_cache(self) -> None: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +def total_ordering(cls: type[_T]) -> type[_T]: ... + +class cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + def __init__(self, func: Callable[[Any], _T]) -> None: ... + @overload + def __get__(self, instance: None, owner: type[Any] | None = ...) -> cached_property[_T]: ... + @overload + def __get__(self, instance: object, owner: type[Any] | None = ...) -> _T: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... + def __class_getitem__(cls, item: Any) -> Any: ... diff --git a/test-data/unit/lib-stub/traceback.pyi b/test-data/unit/lib-stub/traceback.pyi new file mode 100644 index 000000000000..83c1891f80f5 --- /dev/null +++ b/test-data/unit/lib-stub/traceback.pyi @@ -0,0 +1,3 @@ +# Very simplified traceback stubs for use in tests + +def print_tb(*args, **kwargs) -> None: ... diff --git a/test-data/unit/lib-stub/unannotated_lib.pyi b/test-data/unit/lib-stub/unannotated_lib.pyi new file mode 100644 index 000000000000..90bfb6fa47d6 --- /dev/null +++ b/test-data/unit/lib-stub/unannotated_lib.pyi @@ -0,0 +1 @@ +def f(x): ... diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 692f62bf6454..f6336b48ee7b 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1660,3 +1660,55 @@ _testNarrowTypeForDictKeys.py:7: note: Revealed type is "builtins.str" _testNarrowTypeForDictKeys.py:9: note: Revealed type is "Union[builtins.str, None]" _testNarrowTypeForDictKeys.py:14: note: Revealed type is "builtins.str" _testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, None]" + +[case testTypeAliasWithNewStyleUnion] +# flags: --python-version 3.10 +from typing import Literal, Type, TypeAlias + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +[out] +_testTypeAliasWithNewStyleUnion.py:5: note: Revealed type is "typing._SpecialForm" + +[case testTypeAliasWithNewStyleUnionInStub] +# flags: --python-version 3.7 +import m + +[file m.pyi] +from typing import Type +from typing_extensions import Literal, TypeAlias + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +[out] +m.pyi:5: note: Revealed type is "typing._SpecialForm" From 7465abd3d2fe4211a8aaa9453b402fea60071561 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Tue, 8 Nov 2022 15:22:56 +0200 Subject: [PATCH 026/292] Filter out wasm32 wheel in upload-pypi.py (#14035) Tried to make it in a way such that we can add more rules for platforms we want to filter out in the future. 
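Roughly, the intended behaviour is the following (a sketch that mirrors the helper added below; the wheel names are made up for illustration):

```python
def item_ok_for_pypi(name: str) -> bool:
    # Only consider sdists and wheels, strip the archive suffix, then
    # reject platform builds we don't want to upload (currently wasm32).
    if not (name.endswith(".tar.gz") or name.endswith(".whl")):
        return False
    for suffix in (".tar.gz", ".whl"):
        if name.endswith(suffix):
            name = name[: -len(suffix)]
    return not name.endswith("wasm32")

assert item_ok_for_pypi("mypy-0.990-cp310-cp310-win_amd64.whl")
assert item_ok_for_pypi("mypy-0.990.tar.gz")
assert not item_ok_for_pypi("mypy-0.990-cp310-cp310-emscripten_3_1_14_wasm32.whl")
```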
Tested it with `python3 misc/upload-pypi.py --dry-run 0.990` Fixes #14026 --- misc/upload-pypi.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py index be8da9e44f86..e60ec3cca207 100644 --- a/misc/upload-pypi.py +++ b/misc/upload-pypi.py @@ -29,6 +29,21 @@ def is_whl_or_tar(name: str) -> bool: return name.endswith(".tar.gz") or name.endswith(".whl") +def item_ok_for_pypi(name: str) -> bool: + if not is_whl_or_tar(name): + return False + + if name.endswith(".tar.gz"): + name = name[:-7] + if name.endswith(".whl"): + name = name[:-4] + + if name.endswith("wasm32"): + return False + + return True + + def get_release_for_tag(tag: str) -> dict[str, Any]: with urlopen(f"{BASE}/{REPO}/releases/tags/{tag}") as f: data = json.load(f) @@ -75,7 +90,7 @@ def check_sdist(dist: Path, version: str) -> None: def spot_check_dist(dist: Path, version: str) -> None: - items = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + items = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] assert len(items) > 10 assert all(version in item.name for item in items) assert any(item.name.endswith("py3-none-any.whl") for item in items) @@ -93,7 +108,7 @@ def tmp_twine() -> Iterator[Path]: def upload_dist(dist: Path, dry_run: bool = True) -> None: with tmp_twine() as twine: - files = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + files = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] cmd: list[Any] = [twine, "upload"] cmd += files if dry_run: From fa22a49ef1154285d4f251086f85cadf16e54cd9 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 8 Nov 2022 15:43:57 +0000 Subject: [PATCH 027/292] Fix crash on inference with recursive alias to recursive instance (#14038) Fixes #14031 It turns out premature optimization is the root of all evil. (It turns out this costs us less than 1% time on self-check). --- mypy/constraints.py | 3 ++- mypy/types.py | 6 ++++++ test-data/unit/check-recursive-types.test | 11 +++++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 49b042d5baf0..2a641bf27ed5 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -177,8 +177,9 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons for (t, a) in reversed(TypeState.inferring) ): return [] - if has_recursive_types(template): + if has_recursive_types(template) or isinstance(get_proper_type(template), Instance): # This case requires special care because it may cause infinite recursion. + # Note that we include Instances because the may be recursive as str(Sequence[str]). if not has_type_vars(template): # Return early on an empty branch. 
return [] diff --git a/mypy/types.py b/mypy/types.py index e322cf02505f..2f0feb703f6a 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3240,6 +3240,12 @@ def __init__(self) -> None: def visit_type_var(self, t: TypeVarType) -> bool: return True + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return True + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return True + def has_type_vars(typ: Type) -> bool: """Check if a type contains any type variables (recursively).""" diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index 0d727b109658..95b0918866f1 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -826,3 +826,14 @@ z = z x = y # E: Incompatible types in assignment (expression has type "L", variable has type "K") z = x # OK [builtins fixtures/tuple.pyi] + +[case testRecursiveInstanceInferenceNoCrash] +from typing import Sequence, TypeVar, Union + +class C(Sequence[C]): ... + +T = TypeVar("T") +def foo(x: T) -> C: ... + +Nested = Union[C, Sequence[Nested]] +x: Nested = foo(42) From dbcbb3f5c3ef791c98088da0bd1dfa6cbf51f301 Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Tue, 8 Nov 2022 17:57:39 -0800 Subject: [PATCH 028/292] [mypyc] Use tabs instead of spaces in emitted C code (#14016) By using tabs instead of spaces for indentation in the emitted C code we are able to reduce the file size by almost 9%: | File | Size | |------|------| | `build/__native_74cdc94b2b24dafac2a2.c` (spaces) | 86.5MB | | `build/__native_74cdc94b2b24dafac2a2.c` (tabs) | 79.6MB | For this particular file we save about 6.9MB, or 8.7%. I checked, and this has no effect on the compilation speed, which is to be expected. At the very least opening these auto generated files inside an editor will be faster, even if the compilation isn't any faster. I am interested in making mypy/mypyc faster, and this seemed like a low-hanging fruit that could be beneficial. 
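The change itself is tiny; conceptually the emitter now does something like this (a simplified sketch, not the real mypyc `Emitter`):

```python
# Indentation is tracked as a level count and rendered as one "\t" per level
# instead of four spaces, so each nesting level costs one byte per line.
class Emitter:
    def __init__(self) -> None:
        self._indent = 0
        self.fragments = []

    def emit_line(self, line: str = "") -> None:
        if line.startswith("}"):
            self._indent -= 1
        self.fragments.append(self._indent * "\t" + line + "\n")
        if line.endswith("{"):
            self._indent += 1

emitter = Emitter()
for line in ["int f(void) {", "return 0;", "}"]:
    emitter.emit_line(line)
assert emitter.fragments == ["int f(void) {\n", "\treturn 0;\n", "}\n"]
```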
--- mypyc/codegen/emit.py | 6 +++--- mypyc/test/test_emit.py | 2 +- mypyc/test/test_emitfunc.py | 6 +++--- mypyc/test/test_emitwrapper.py | 3 ++- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 5d47636b4c1e..15dece700a1e 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -176,10 +176,10 @@ def __init__( # Low-level operations def indent(self) -> None: - self._indent += 4 + self._indent += 1 def dedent(self) -> None: - self._indent -= 4 + self._indent -= 1 assert self._indent >= 0 def label(self, label: BasicBlock) -> str: @@ -194,7 +194,7 @@ def attr(self, name: str) -> str: def emit_line(self, line: str = "") -> None: if line.startswith("}"): self.dedent() - self.fragments.append(self._indent * " " + line + "\n") + self.fragments.append(self._indent * "\t" + line + "\n") if line.endswith("{"): self.indent() diff --git a/mypyc/test/test_emit.py b/mypyc/test/test_emit.py index 7351cd7fb13e..1b624a7a6cdb 100644 --- a/mypyc/test/test_emit.py +++ b/mypyc/test/test_emit.py @@ -28,4 +28,4 @@ def test_emit_line(self) -> None: emitter.emit_line("a {") emitter.emit_line("f();") emitter.emit_line("}") - assert emitter.fragments == ["line;\n", "a {\n", " f();\n", "}\n"] + assert emitter.fragments == ["line;\n", "a {\n", "\tf();\n", "}\n"] diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index d7dcf3be532b..3b44f7e444c8 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -833,7 +833,7 @@ def assert_emit( op.accept(visitor) frags = declarations.fragments + emitter.fragments - actual_lines = [line.strip(" ") for line in frags] + actual_lines = [line.strip(" \t") for line in frags] assert all(line.endswith("\n") for line in actual_lines) actual_lines = [line.rstrip("\n") for line in actual_lines] if not expected.strip(): @@ -900,7 +900,7 @@ def test_simple(self) -> None: " return cpy_r_arg;\n", "}\n", ], - result, + [line.replace("\t", 4 * " ") for line in result], msg="Generated code invalid", ) @@ -927,6 +927,6 @@ def test_register(self) -> None: " CPy_Unreachable();\n", "}\n", ], - result, + [line.replace("\t", 4 * " ") for line in result], msg="Generated code invalid", ) diff --git a/mypyc/test/test_emitwrapper.py b/mypyc/test/test_emitwrapper.py index c4465656444c..ec5adb4c6622 100644 --- a/mypyc/test/test_emitwrapper.py +++ b/mypyc/test/test_emitwrapper.py @@ -56,5 +56,6 @@ def test_check_int(self) -> None: ) def assert_lines(self, expected: list[str], actual: list[str]) -> None: - actual = [line.rstrip("\n") for line in actual] + actual = [line.rstrip("\n").replace(4 * " ", "\t") for line in actual] + expected = [line.replace(4 * " ", "\t") for line in expected] assert_string_arrays_equal(expected, actual, "Invalid output") From 5cc14390337f28e6f90efaf0ad5c4b1d322a8638 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 10 Nov 2022 17:37:09 +0000 Subject: [PATCH 029/292] Don't ignore errors in files passed on the command line (#14060) #13768 had a bug so that errors were sometimes silenced in files that were under a directory in `sys.path`. `sys.path` sometimes includes the current working directory, resulting in no errors reported at all. Fix it by always reporting errors if a file was passed on the command line (unless *explicitly* silenced). When using import following errors can still be ignored, which is questionable, but this didn't change recently so I'm not addressing it here. Fixes #14042. 
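Roughly, the rule after this change is (a simplified sketch of the decision, not the actual control flow in `mypy/build.py`):

```python
# Being under site-packages/typeshed/sys.path only silences errors for modules
# reached via import following, never for "root sources", i.e. files and
# modules passed explicitly on the command line.
def should_silence_errors(in_silent_dir: bool, root_source: bool) -> bool:
    return in_silent_dir and not root_source

assert not should_silence_errors(in_silent_dir=True, root_source=True)
assert should_silence_errors(in_silent_dir=True, root_source=False)
```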
--- mypy/build.py | 10 ++++-- mypy/dmypy_server.py | 14 ++++---- mypy/modulefinder.py | 8 +++-- mypy/server/update.py | 28 +++++++++++----- mypy/test/testcmdline.py | 8 ++--- test-data/unit/cmdline.test | 65 +++++++++++++++++++++++++++++++++++++ 6 files changed, 108 insertions(+), 25 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 31851680ea82..27dc1141ce28 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -1940,7 +1940,7 @@ def __init__( raise if follow_imports == "silent": self.ignore_all = True - elif path and is_silent_import_module(manager, path): + elif path and is_silent_import_module(manager, path) and not root_source: self.ignore_all = True self.path = path if path: @@ -2629,7 +2629,7 @@ def find_module_and_diagnose( else: skipping_module(manager, caller_line, caller_state, id, result) raise ModuleNotFound - if is_silent_import_module(manager, result): + if is_silent_import_module(manager, result) and not root_source: follow_imports = "silent" return (result, follow_imports) else: @@ -3024,7 +3024,11 @@ def load_graph( for bs in sources: try: st = State( - id=bs.module, path=bs.path, source=bs.text, manager=manager, root_source=True + id=bs.module, + path=bs.path, + source=bs.text, + manager=manager, + root_source=not bs.followed, ) except ModuleNotFound: continue diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 671999065e7d..be2f4ab8d618 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -592,7 +592,7 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources.extend(new_files) # Process changes directly reachable from roots. - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) # Follow deps from changed modules (still within graph). 
worklist = changed[:] @@ -609,13 +609,13 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources2, graph, seen, changed_paths ) self.update_sources(new_files) - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) worklist.extend(changed) t2 = time.time() def refresh_file(module: str, path: str) -> list[str]: - return fine_grained_manager.update([(module, path)], []) + return fine_grained_manager.update([(module, path)], [], followed=True) for module_id, state in list(graph.items()): new_messages = refresh_suppressed_submodules( @@ -632,10 +632,10 @@ def refresh_file(module: str, path: str) -> list[str]: new_unsuppressed = self.find_added_suppressed(graph, seen, manager.search_paths) if not new_unsuppressed: break - new_files = [BuildSource(mod[1], mod[0]) for mod in new_unsuppressed] + new_files = [BuildSource(mod[1], mod[0], followed=True) for mod in new_unsuppressed] sources.extend(new_files) self.update_sources(new_files) - messages = fine_grained_manager.update(new_unsuppressed, []) + messages = fine_grained_manager.update(new_unsuppressed, [], followed=True) for module_id, path in new_unsuppressed: new_messages = refresh_suppressed_submodules( @@ -717,7 +717,7 @@ def find_reachable_changed_modules( for dep in state.dependencies: if dep not in seen: seen.add(dep) - worklist.append(BuildSource(graph[dep].path, graph[dep].id)) + worklist.append(BuildSource(graph[dep].path, graph[dep].id, followed=True)) return changed, new_files def direct_imports( @@ -725,7 +725,7 @@ def direct_imports( ) -> list[BuildSource]: """Return the direct imports of module not included in seen.""" state = graph[module[0]] - return [BuildSource(graph[dep].path, dep) for dep in state.dependencies] + return [BuildSource(graph[dep].path, dep, followed=True) for dep in state.dependencies] def find_added_suppressed( self, graph: mypy.build.Graph, seen: set[str], search_paths: SearchPaths diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 5d542b154906..e64dba5ce29d 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -115,15 +115,19 @@ def __init__( module: str | None, text: str | None = None, base_dir: str | None = None, + followed: bool = False, ) -> None: self.path = path # File where it's found (e.g. 'xxx/yyy/foo/bar.py') self.module = module or "__main__" # Module name (e.g. 'foo.bar') self.text = text # Source code, if initially supplied, else None self.base_dir = base_dir # Directory where the package is rooted (e.g. 'xxx/yyy') + self.followed = followed # Was this found by following imports? 
def __repr__(self) -> str: - return "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r})".format( - self.path, self.module, self.text is not None, self.base_dir + return ( + "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r}, followed={})".format( + self.path, self.module, self.text is not None, self.base_dir, self.followed + ) ) diff --git a/mypy/server/update.py b/mypy/server/update.py index 686068a4aad0..a1f57b5a6746 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -203,7 +203,10 @@ def __init__(self, result: BuildResult) -> None: self.processed_targets: list[str] = [] def update( - self, changed_modules: list[tuple[str, str]], removed_modules: list[tuple[str, str]] + self, + changed_modules: list[tuple[str, str]], + removed_modules: list[tuple[str, str]], + followed: bool = False, ) -> list[str]: """Update previous build result by processing changed modules. @@ -219,6 +222,7 @@ def update( Assume this is correct; it's not validated here. removed_modules: Modules that have been deleted since the previous update or removed from the build. + followed: If True, the modules were found through following imports Returns: A list of errors. @@ -256,7 +260,9 @@ def update( self.blocking_error = None while True: - result = self.update_one(changed_modules, initial_set, removed_set, blocking_error) + result = self.update_one( + changed_modules, initial_set, removed_set, blocking_error, followed + ) changed_modules, (next_id, next_path), blocker_messages = result if blocker_messages is not None: @@ -329,6 +335,7 @@ def update_one( initial_set: set[str], removed_set: set[str], blocking_error: str | None, + followed: bool, ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Process a module from the list of changed modules. @@ -355,7 +362,7 @@ def update_one( ) return changed_modules, (next_id, next_path), None - result = self.update_module(next_id, next_path, next_id in removed_set) + result = self.update_module(next_id, next_path, next_id in removed_set, followed) remaining, (next_id, next_path), blocker_messages = result changed_modules = [(id, path) for id, path in changed_modules if id != next_id] changed_modules = dedupe_modules(remaining + changed_modules) @@ -368,7 +375,7 @@ def update_one( return changed_modules, (next_id, next_path), blocker_messages def update_module( - self, module: str, path: str, force_removed: bool + self, module: str, path: str, force_removed: bool, followed: bool ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Update a single modified module. @@ -380,6 +387,7 @@ def update_module( path: File system path of the module force_removed: If True, consider module removed from the build even if path exists (used for removing an existing file from the build) + followed: Was this found via import following? Returns: Tuple with these items: @@ -417,7 +425,7 @@ def update_module( manager.errors.reset() self.processed_targets.append(module) result = update_module_isolated( - module, path, manager, previous_modules, graph, force_removed + module, path, manager, previous_modules, graph, force_removed, followed ) if isinstance(result, BlockedUpdate): # Blocking error -- just give up @@ -552,6 +560,7 @@ def update_module_isolated( previous_modules: dict[str, str], graph: Graph, force_removed: bool, + followed: bool, ) -> UpdateResult: """Build a new version of one changed module only. 
@@ -575,7 +584,7 @@ def update_module_isolated( delete_module(module, path, graph, manager) return NormalUpdate(module, path, [], None) - sources = get_sources(manager.fscache, previous_modules, [(module, path)]) + sources = get_sources(manager.fscache, previous_modules, [(module, path)], followed) if module in manager.missing_modules: manager.missing_modules.remove(module) @@ -728,12 +737,15 @@ def get_module_to_path_map(graph: Graph) -> dict[str, str]: def get_sources( - fscache: FileSystemCache, modules: dict[str, str], changed_modules: list[tuple[str, str]] + fscache: FileSystemCache, + modules: dict[str, str], + changed_modules: list[tuple[str, str]], + followed: bool, ) -> list[BuildSource]: sources = [] for id, path in changed_modules: if fscache.isfile(path): - sources.append(BuildSource(path, id, None)) + sources.append(BuildSource(path, id, None, followed=followed)) return sources diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 268b6bab1ec2..2e8b0dc9a1cd 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -69,12 +69,10 @@ def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: env["PYTHONPATH"] = PREFIX if os.path.isdir(extra_path): env["PYTHONPATH"] += os.pathsep + extra_path + cwd = os.path.join(test_temp_dir, custom_cwd or "") + args = [arg.replace("$CWD", os.path.abspath(cwd)) for arg in args] process = subprocess.Popen( - fixed + args, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=os.path.join(test_temp_dir, custom_cwd or ""), - env=env, + fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env ) outb, errb = process.communicate() result = process.returncode diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 2ea7f07da3bc..92b0af6942bc 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1505,3 +1505,68 @@ def f(): [out] a.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs == Return code: 0 + +[case testCustomTypeshedDirFilePassedExplicitly] +# cmd: mypy --custom-typeshed-dir dir m.py dir/stdlib/foo.pyi +[file m.py] +1() +[file dir/stdlib/abc.pyi] +1() # Errors are not reported from typeshed by default +[file dir/stdlib/builtins.pyi] +class object: pass +class str(object): pass +class int(object): pass +[file dir/stdlib/sys.pyi] +[file dir/stdlib/types.pyi] +[file dir/stdlib/typing.pyi] +[file dir/stdlib/mypy_extensions.pyi] +[file dir/stdlib/typing_extensions.pyi] +[file dir/stdlib/foo.pyi] +1() # Errors are reported if the file was explicitly passed on the command line +[file dir/stdlib/VERSIONS] +[out] +dir/stdlib/foo.pyi:1: error: "int" not callable +m.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly1] +# cmd: mypy $CWD/pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly2] +# cmd: mypy pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly3] +# cmd: mypy -p foo +# cwd: pypath +[file pypath/foo/__init__.py] +1() +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" not callable +foo/__init__.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly4] +# cmd: mypy -m foo +# cwd: pypath +[file pypath/foo.py] +1() +[out] +foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly5] +# cmd: mypy -m foo.m +# cwd: pypath +[file 
pypath/foo/__init__.py] +1() # TODO: Maybe this should generate errors as well? But how would we decide? +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" not callable From b18281c857a3e089740b6c6c03ff52a529c1b0ba Mon Sep 17 00:00:00 2001 From: Nick Drozd Date: Thu, 10 Nov 2022 14:20:38 -0600 Subject: [PATCH 030/292] Simplify boolean return logic in various places (#14012) Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- misc/fix_annotate.py | 9 +++--- mypy/build.py | 20 ++++++-------- mypy/checker.py | 24 ++++++---------- mypy/modulefinder.py | 23 +++++++--------- mypy/stubgen.py | 5 +--- mypy/subtypes.py | 34 ++++++++--------------- mypy/typeanal.py | 4 +-- mypy/types.py | 47 ++++++++++++++++---------------- mypyc/ir/class_ir.py | 5 +--- mypyc/irbuild/prebuildvisitor.py | 10 +++---- mypyc/irbuild/prepare.py | 23 ++++++++++------ 11 files changed, 86 insertions(+), 118 deletions(-) diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py index b661a899924c..7fffba8a8507 100644 --- a/misc/fix_annotate.py +++ b/misc/fix_annotate.py @@ -213,8 +213,7 @@ def has_return_exprs(self, node): results = {} if self.return_expr.match(node, results): return True - for child in node.children: - if child.type not in (syms.funcdef, syms.classdef): - if self.has_return_exprs(child): - return True - return False + return any( + child.type not in (syms.funcdef, syms.classdef) and self.has_return_exprs(child) + for child in node.children + ) diff --git a/mypy/build.py b/mypy/build.py index 27dc1141ce28..62367c35915e 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2728,11 +2728,8 @@ def in_partial_package(id: str, manager: BuildManager) -> bool: else: parent_mod = parent_st.tree if parent_mod is not None: - if parent_mod.is_partial_stub_package: - return True - else: - # Bail out soon, complete subpackage found - return False + # Bail out soon, complete subpackage found + return parent_mod.is_partial_stub_package id = parent return False @@ -3580,9 +3577,10 @@ def record_missing_stub_packages(cache_dir: str, missing_stub_packages: set[str] def is_silent_import_module(manager: BuildManager, path: str) -> bool: - if not manager.options.no_silence_site_packages: - for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path: - if is_sub_path(path, dir): - # Silence errors in site-package dirs and typeshed - return True - return False + if manager.options.no_silence_site_packages: + return False + # Silence errors in site-package dirs and typeshed + return any( + is_sub_path(path, dir) + for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path + ) diff --git a/mypy/checker.py b/mypy/checker.py index 67d132afe2c7..fef85d085496 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2292,9 +2292,7 @@ def is_final_enum_value(self, sym: SymbolTableNode) -> bool: ): return False - if self.is_stub or sym.node.has_explicit_value: - return True - return False + return self.is_stub or sym.node.has_explicit_value def check_enum_bases(self, defn: ClassDef) -> None: """ @@ -5978,10 +5976,7 @@ def store_type(self, node: Expression, typ: Type) -> None: self._type_maps[-1][node] = typ def has_type(self, node: Expression) -> bool: - for m in reversed(self._type_maps): - if node in m: - return True - return False + return any(node in m for m in reversed(self._type_maps)) def lookup_type_or_none(self, node: Expression) -> Type | None: for m in reversed(self._type_maps): @@ -6152,13 +6147,11 @@ def handle_partial_var_type( return 
fixup_partial_type(typ) def is_defined_in_base_class(self, var: Var) -> bool: - if var.info: - for base in var.info.mro[1:]: - if base.get(var.name) is not None: - return True - if var.info.fallback_to_any: - return True - return False + if not var.info: + return False + return var.info.fallback_to_any or any( + base.get(var.name) is not None for base in var.info.mro[1:] + ) def find_partial_types(self, var: Var) -> dict[Var, Context] | None: """Look for an active partial type scope containing variable. @@ -6354,8 +6347,7 @@ def is_writable_attribute(self, node: Node) -> bool: elif isinstance(node, OverloadedFuncDef) and node.is_property: first_item = cast(Decorator, node.items[0]) return first_item.var.is_settable_property - else: - return False + return False def get_isinstance_type(self, expr: Expression) -> list[TypeRange] | None: if isinstance(expr, OpExpr) and expr.op == "|": diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index e64dba5ce29d..b2abb4847705 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -148,14 +148,11 @@ def __init__(self, sources: list[BuildSource]) -> None: self.source_modules[source.module] = source.path or "" def is_source(self, file: MypyFile) -> bool: - if file.path and file.path in self.source_paths: - return True - elif file._fullname in self.source_modules: - return True - elif self.source_text_present: - return True - else: - return False + return ( + (file.path and file.path in self.source_paths) + or file._fullname in self.source_modules + or self.source_text_present + ) class FindModuleCache: @@ -573,11 +570,11 @@ def _is_compatible_stub_package(self, stub_dir: str) -> bool: whether the stubs are compatible with Python 2 and 3. """ metadata_fnam = os.path.join(stub_dir, "METADATA.toml") - if os.path.isfile(metadata_fnam): - with open(metadata_fnam, "rb") as f: - metadata = tomllib.load(f) - return bool(metadata.get("python3", True)) - return True + if not os.path.isfile(metadata_fnam): + return True + with open(metadata_fnam, "rb") as f: + metadata = tomllib.load(f) + return bool(metadata.get("python3", True)) def find_modules_recursive(self, module: str) -> list[BuildSource]: module_path = self.find_module(module) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index fbae9ebaa252..8c7e24504270 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -1309,10 +1309,7 @@ def is_private_name(self, name: str, fullname: str | None = None) -> bool: def is_private_member(self, fullname: str) -> bool: parts = fullname.split(".") - for part in parts: - if self.is_private_name(part): - return True - return False + return any(self.is_private_name(part) for part in parts) def get_str_type_of_node( self, rvalue: Expression, can_infer_optional: bool = False, can_be_any: bool = True diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 2724379ab878..9a4982f5b8ec 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -429,22 +429,18 @@ def visit_erased_type(self, left: ErasedType) -> bool: # This may be encountered during type inference. The result probably doesn't # matter much. # TODO: it actually does matter, figure out more principled logic about this. 
- if self.subtype_context.keep_erased_types: - return False - return True + return not self.subtype_context.keep_erased_types def visit_deleted_type(self, left: DeletedType) -> bool: return True def visit_instance(self, left: Instance) -> bool: if left.type.fallback_to_any and not self.proper_subtype: - if isinstance(self.right, NoneType): - # NOTE: `None` is a *non-subclassable* singleton, therefore no class - # can by a subtype of it, even with an `Any` fallback. - # This special case is needed to treat descriptors in classes with - # dynamic base classes correctly, see #5456. - return False - return True + # NOTE: `None` is a *non-subclassable* singleton, therefore no class + # can by a subtype of it, even with an `Any` fallback. + # This special case is needed to treat descriptors in classes with + # dynamic base classes correctly, see #5456. + return not isinstance(self.right, NoneType) right = self.right if isinstance(right, TupleType) and mypy.typeops.tuple_fallback(right).type.is_enum: return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) @@ -513,11 +509,7 @@ def check_mixed( isinstance(unpacked_type, Instance) and unpacked_type.type.fullname == "builtins.tuple" ): - if not all( - is_equivalent(l, unpacked_type.args[0]) for l in compare_to - ): - return False - return True + return all(is_equivalent(l, unpacked_type.args[0]) for l in compare_to) if isinstance(unpacked_type, TypeVarTupleType): return False if isinstance(unpacked_type, AnyType): @@ -741,9 +733,8 @@ def visit_tuple_type(self, left: TupleType) -> bool: elif isinstance(right, TupleType): if len(left.items) != len(right.items): return False - for l, r in zip(left.items, right.items): - if not self._is_subtype(l, r): - return False + if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): + return False rfallback = mypy.typeops.tuple_fallback(right) if is_named_instance(rfallback, "builtins.tuple"): # No need to verify fallback. This is useful since the calculated fallback @@ -752,9 +743,7 @@ def visit_tuple_type(self, left: TupleType) -> bool: # join(Union[int, C], Union[str, C]) == Union[int, str, C]. return True lfallback = mypy.typeops.tuple_fallback(left) - if not self._is_subtype(lfallback, rfallback): - return False - return True + return self._is_subtype(lfallback, rfallback) else: return False @@ -1368,8 +1357,7 @@ def g(x: int) -> int: ... 
unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False - else: - left = unified + left = unified # If we allow partial overlaps, we don't need to leave R generic: # if we can find even just a single typevar assignment which diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 35f60f54605a..55d819071a3a 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -1729,9 +1729,7 @@ def __init__( def _seems_like_callable(self, type: UnboundType) -> bool: if not type.args: return False - if isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)): - return True - return False + return isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)) def visit_unbound_type(self, t: UnboundType) -> TypeVarLikeList: name = t.name diff --git a/mypy/types.py b/mypy/types.py index 2f0feb703f6a..a73c41904ea7 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -437,14 +437,12 @@ def __repr__(self) -> str: return self.raw_id.__repr__() def __eq__(self, other: object) -> bool: - if isinstance(other, TypeVarId): - return ( - self.raw_id == other.raw_id - and self.meta_level == other.meta_level - and self.namespace == other.namespace - ) - else: - return False + return ( + isinstance(other, TypeVarId) + and self.raw_id == other.raw_id + and self.meta_level == other.meta_level + and self.namespace == other.namespace + ) def __ne__(self, other: object) -> bool: return not (self == other) @@ -910,9 +908,7 @@ def __hash__(self) -> int: return hash(tuple(self.items)) def __eq__(self, other: object) -> bool: - if not isinstance(other, TypeList): - return False - return self.items == other.items + return isinstance(other, TypeList) and self.items == other.items class UnpackType(ProperType): @@ -2263,16 +2259,19 @@ def __hash__(self) -> int: return hash((frozenset(self.items.items()), self.fallback, frozenset(self.required_keys))) def __eq__(self, other: object) -> bool: - if isinstance(other, TypedDictType): - if frozenset(self.items.keys()) != frozenset(other.items.keys()): - return False - for (_, left_item_type, right_item_type) in self.zip(other): - if not left_item_type == right_item_type: - return False - return self.fallback == other.fallback and self.required_keys == other.required_keys - else: + if not isinstance(other, TypedDictType): return NotImplemented + return ( + frozenset(self.items.keys()) == frozenset(other.items.keys()) + and all( + left_item_type == right_item_type + for (_, left_item_type, right_item_type) in self.zip(other) + ) + and self.fallback == other.fallback + and self.required_keys == other.required_keys + ) + def serialize(self) -> JsonDict: return { ".class": "TypedDictType", @@ -3352,11 +3351,11 @@ def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue """Check if this type is a LiteralType with the given fallback type and value.""" if isinstance(typ, Instance) and typ.last_known_value: typ = typ.last_known_value - if not isinstance(typ, LiteralType): - return False - if typ.fallback.type.fullname != fallback_fullname: - return False - return typ.value == value + return ( + isinstance(typ, LiteralType) + and typ.fallback.type.fullname == fallback_fullname + and typ.value == value + ) def is_self_type_like(typ: Type, *, is_classmethod: bool) -> bool: diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index 7f55decfd754..f0f772306e60 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -265,10 +265,7 @@ def has_attr(self, name: str) -> bool: return True def is_deletable(self, name: 
str) -> bool: - for ir in self.mro: - if name in ir.deletable: - return True - return False + return any(name in ir.deletable for ir in self.mro) def is_always_defined(self, name: str) -> bool: if self.is_deletable(name): diff --git a/mypyc/irbuild/prebuildvisitor.py b/mypyc/irbuild/prebuildvisitor.py index 7d52dc8da57c..d99453955002 100644 --- a/mypyc/irbuild/prebuildvisitor.py +++ b/mypyc/irbuild/prebuildvisitor.py @@ -162,12 +162,10 @@ def visit_symbol_node(self, symbol: SymbolNode) -> None: def is_parent(self, fitem: FuncItem, child: FuncItem) -> bool: # Check if child is nested within fdef (possibly indirectly # within multiple nested functions). - if child in self.nested_funcs: - parent = self.nested_funcs[child] - if parent == fitem: - return True - return self.is_parent(fitem, parent) - return False + if child not in self.nested_funcs: + return False + parent = self.nested_funcs[child] + return parent == fitem or self.is_parent(fitem, parent) def add_free_variable(self, symbol: SymbolNode) -> None: # Find the function where the symbol was (likely) first declared, diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index dc153ea11561..639d1a5ea0d1 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -178,15 +178,20 @@ def prepare_method_def( def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool: # Checks to ensure supported property decorator semantics - if len(prop.items) == 2: - getter = prop.items[0] - setter = prop.items[1] - if isinstance(getter, Decorator) and isinstance(setter, Decorator): - if getter.func.is_property and len(setter.decorators) == 1: - if isinstance(setter.decorators[0], MemberExpr): - if setter.decorators[0].name == "setter": - return True - return False + if len(prop.items) != 2: + return False + + getter = prop.items[0] + setter = prop.items[1] + + return ( + isinstance(getter, Decorator) + and isinstance(setter, Decorator) + and getter.func.is_property + and len(setter.decorators) == 1 + and isinstance(setter.decorators[0], MemberExpr) + and setter.decorators[0].name == "setter" + ) def can_subclass_builtin(builtin_base: str) -> bool: From a48dd5ad7ff820da7ea1e947008fff2865e2296a Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 10 Nov 2022 15:37:07 -0800 Subject: [PATCH 031/292] Fix incompatible overrides of overloaded methods in concrete subclasses (#14017) Fixes #14002 --- mypy/checker.py | 17 ++++++ test-data/unit/check-selftype.test | 86 ++++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+) diff --git a/mypy/checker.py b/mypy/checker.py index fef85d085496..2688a611b56a 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1869,6 +1869,23 @@ def check_method_override_for_base_with_name( original_class_or_static = False # a variable can't be class or static if isinstance(original_type, FunctionLike): + active_self_type = self.scope.active_self_type() + if isinstance(original_type, Overloaded) and active_self_type: + # If we have an overload, filter to overloads that match the self type. + # This avoids false positives for concrete subclasses of generic classes, + # see testSelfTypeOverrideCompatibility for an example. + # It's possible we might want to do this as part of bind_and_map_method + filtered_items = [ + item + for item in original_type.items + if not item.arg_types or is_subtype(active_self_type, item.arg_types[0]) + ] + # If we don't have any filtered_items, maybe it's always a valid override + # of the superclass? 
However if you get to that point you're in murky type + # territory anyway, so we just preserve the type and have the behaviour match + # that of older versions of mypy. + if filtered_items: + original_type = Overloaded(filtered_items) original_type = self.bind_and_map_method(base_attr, original_type, defn.info, base) if original_node and is_property(original_node): original_type = get_property_type(original_type) diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index bfb0eb5a4d89..3d801d23a642 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -156,6 +156,92 @@ class B2(A[T]): def f(self: A[bytes]) -> bytes: ... def f(self): ... +class C(A[int]): + def f(self) -> int: ... + +class D(A[str]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + +class E(A[T]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + + +class F(A[bytes]): + # Note there's an argument to be made that this is actually compatible with the supertype + def f(self) -> bytes: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> bytes + +class G(A): + def f(self): ... + +class H(A[int]): + def f(self): ... + +class I(A[int]): + def f(*args): ... + +class J(A[int]): + def f(self, arg) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: Subclass: \ + # N: def f(self, arg: Any) -> int + +[builtins fixtures/tuple.pyi] + +[case testSelfTypeOverrideCompatibilityTypeVar-xfail] +from typing import overload, TypeVar, Union + +AT = TypeVar("AT", bound="A") + +class A: + @overload + def f(self: AT, x: int) -> AT: ... + @overload + def f(self, x: str) -> None: ... + @overload + def f(self: AT) -> bytes: ... + def f(*a, **kw): ... + +class B(A): + @overload # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None \ + # N: @overload \ + # N: def f(self) -> bytes \ + # N: Subclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None + def f(self, x: int) -> B: ... + @overload + def f(self, x: str) -> None: ... + def f(*a, **kw): ... +[builtins fixtures/dict.pyi] + [case testSelfTypeSuper] from typing import TypeVar, cast From 70e544b1fb848448fc702e95a84edfa5ab628d3c Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 11 Nov 2022 05:01:47 -0800 Subject: [PATCH 032/292] Update --no-warn-no-return docs for empty body changes (#14065) Fixes #14048 Co-authored-by: Jelle Zijlstra --- docs/source/command_line.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 83d2983472be..31d23db204eb 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -448,9 +448,10 @@ potentially problematic or redundant in some way. 
are when: - The function has a ``None`` or ``Any`` return type - - The function has an empty body or a body that is just - ellipsis (``...``). Empty functions are often used for - abstract methods. + - The function has an empty body and is marked as an abstract method, + is in a protocol class, or is in a stub file + - The execution path can never return; for example, if an exception + is always raised Passing in :option:`--no-warn-no-return` will disable these error messages in all cases. From f78d1fdc154d507353b34e7ea2037ef68de4e6fc Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 11 Nov 2022 05:29:31 -0800 Subject: [PATCH 033/292] Fix another crash with report generation on namespace packages (#14063) Fixes #14046. Similar to #13733 Best reviewed with hide whitespace. --- mypy/report.py | 85 ++++++++++++++++++++++++-------------------------- 1 file changed, 41 insertions(+), 44 deletions(-) diff --git a/mypy/report.py b/mypy/report.py index 37b7497f1371..3fac2234c840 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -637,51 +637,48 @@ def on_file( etree.SubElement(class_element, "methods") lines_element = etree.SubElement(class_element, "lines") - with tokenize.open(path) as input_file: - class_lines_covered = 0 - class_total_lines = 0 - for lineno, _ in enumerate(input_file, 1): - status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) - hits = 0 - branch = False - if status == stats.TYPE_EMPTY: - continue - class_total_lines += 1 - if status != stats.TYPE_ANY: - class_lines_covered += 1 - hits = 1 - if status == stats.TYPE_IMPRECISE: - branch = True - file_info.counts[status] += 1 - line_element = etree.SubElement( - lines_element, - "line", - branch=str(branch).lower(), - hits=str(hits), - number=str(lineno), - precision=stats.precision_names[status], - ) - if branch: - line_element.attrib["condition-coverage"] = "50% (1/2)" - class_element.attrib["branch-rate"] = "0" - class_element.attrib["line-rate"] = get_line_rate( - class_lines_covered, class_total_lines + class_lines_covered = 0 + class_total_lines = 0 + for lineno, _ in iterate_python_lines(path): + status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) + hits = 0 + branch = False + if status == stats.TYPE_EMPTY: + continue + class_total_lines += 1 + if status != stats.TYPE_ANY: + class_lines_covered += 1 + hits = 1 + if status == stats.TYPE_IMPRECISE: + branch = True + file_info.counts[status] += 1 + line_element = etree.SubElement( + lines_element, + "line", + branch=str(branch).lower(), + hits=str(hits), + number=str(lineno), + precision=stats.precision_names[status], ) - # parent_module is set to whichever module contains this file. For most files, we want - # to simply strip the last element off of the module. But for __init__.py files, - # the module == the parent module. 
- parent_module = file_info.module.rsplit(".", 1)[0] - if file_info.name.endswith("__init__.py"): - parent_module = file_info.module - - if parent_module not in self.root_package.packages: - self.root_package.packages[parent_module] = CoberturaPackage(parent_module) - current_package = self.root_package.packages[parent_module] - packages_to_update = [self.root_package, current_package] - for package in packages_to_update: - package.total_lines += class_total_lines - package.covered_lines += class_lines_covered - current_package.classes[class_name] = class_element + if branch: + line_element.attrib["condition-coverage"] = "50% (1/2)" + class_element.attrib["branch-rate"] = "0" + class_element.attrib["line-rate"] = get_line_rate(class_lines_covered, class_total_lines) + # parent_module is set to whichever module contains this file. For most files, we want + # to simply strip the last element off of the module. But for __init__.py files, + # the module == the parent module. + parent_module = file_info.module.rsplit(".", 1)[0] + if file_info.name.endswith("__init__.py"): + parent_module = file_info.module + + if parent_module not in self.root_package.packages: + self.root_package.packages[parent_module] = CoberturaPackage(parent_module) + current_package = self.root_package.packages[parent_module] + packages_to_update = [self.root_package, current_package] + for package in packages_to_update: + package.total_lines += class_total_lines + package.covered_lines += class_lines_covered + current_package.classes[class_name] = class_element def on_finish(self) -> None: self.root.attrib["line-rate"] = get_line_rate( From 67855fab376c3c2d68bcf0940b242c9b71a98156 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 11 Nov 2022 12:08:29 -0800 Subject: [PATCH 034/292] Fix crash with function redefinition (#14064) Fixes #14027 (issue was surfaced by #13509) --- mypy/checker.py | 5 ++++- test-data/unit/check-functions.test | 14 ++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/mypy/checker.py b/mypy/checker.py index 2688a611b56a..e727c343aa14 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -960,7 +960,10 @@ def _visit_func_def(self, defn: FuncDef) -> None: # Function definition overrides a variable initialized via assignment or a # decorated function. orig_type = defn.original_def.type - assert orig_type is not None, f"Error checking function redefinition {defn}" + if orig_type is None: + # If other branch is unreachable, we don't type check it and so we might + # not have a type for the original definition + return if isinstance(orig_type, PartialType): if orig_type.type is None: # Ah this is a partial type. Give it the type of the function. diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index bb36b65f35de..ae6424f743be 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -1475,6 +1475,20 @@ else: @dec def f(): pass +[case testConditionalFunctionDefinitionUnreachable] +def bar() -> None: + if False: + foo = 1 + else: + def foo(obj): ... + +def baz() -> None: + if False: + foo: int = 1 + else: + def foo(obj): ... 
# E: Incompatible redefinition (redefinition with type "Callable[[Any], Any]", original type "int") +[builtins fixtures/tuple.pyi] + [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: str) -> None: pass From 33d2b894bd2daff5e17074a37df9bdfe0a6c5518 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sun, 13 Nov 2022 21:56:34 +0100 Subject: [PATCH 035/292] Add error-code for `truthy-iterable` (#13762) `Iterable` does not implement `__len__`, so passing a `Generator` would always evaluate to `true`. Suggest `Collection` as alternative. In most cases this isn't an issue as the function is often called with `list` anyway. However, the dedicated check can pinpoint subtle errors that might get unnoticed otherwise. --- docs/source/error_code_list2.rst | 28 ++++++++++++------------- mypy/checker.py | 8 +++++++ mypy/errorcodes.py | 6 ++++++ mypy/message_registry.py | 4 ++++ mypy/semanal.py | 6 ++++-- mypyc/test-data/irbuild-statements.test | 4 ++-- test-data/unit/check-errorcodes.test | 7 +++++++ 7 files changed, 44 insertions(+), 19 deletions(-) diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index cac19e705361..0a2d8a8c5c5c 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -231,31 +231,29 @@ since unless implemented by a sub-type, the expression will always evaluate to t if foo: ... +The check is similar in concept to ensuring that an expression's type implements an expected interface (e.g. ``Sized``), +except that attempting to invoke an undefined method (e.g. ``__len__``) results in an error, +while attempting to evaluate an object in boolean context without a concrete implementation results in a truthy value. -This check might falsely imply an error. For example, ``Iterable`` does not implement -``__len__`` and so this code will be flagged: -.. code-block:: python +Check that iterable is not implicitly true in boolean context [truthy-iterable] +------------------------------------------------------------------------------- - # Use "mypy -enable-error-code truthy-bool ..." +``Iterable`` does not implement ``__len__`` and so this code will be flagged: + +.. code-block:: python from typing import Iterable - def transform(items: Iterable[int]) -> Iterable[int]: - # Error: "items" has type "Iterable[int]" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + def transform(items: Iterable[int]) -> list[int]: + # Error: "items" has type "Iterable[int]" which can always be true in boolean context. Consider using "Collection[int]" instead. [truthy-iterable] if not items: return [42] return [x + 1 for x in items] - - -If called as ``transform((int(s) for s in []))``, this function would not return ``[42]`` unlike what the author -might have intended. Of course it's possible that ``transform`` is only passed ``list`` objects, and so there is -no error in practice. In such case, it might be prudent to annotate ``items: Sequence[int]``. - -This is similar in concept to ensuring that an expression's type implements an expected interface (e.g. ``Sized``), -except that attempting to invoke an undefined method (e.g. ``__len__``) results in an error, -while attempting to evaluate an object in boolean context without a concrete implementation results in a truthy value. 
+If called with a ``Generator`` like ``int(x) for x in []``, this function would not return ``[42]`` unlike +what the author might have intended. Of course it's possible that ``transform`` is only passed ``list`` objects, +and so there is no error in practice. In such case, it is recommended to annotate ``items: Collection[int]``. Check that function isn't used in boolean context [truthy-function] diff --git a/mypy/checker.py b/mypy/checker.py index e727c343aa14..aec2574e0ada 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -5106,6 +5106,14 @@ def format_expr_type() -> str: self.fail(message_registry.FUNCTION_ALWAYS_TRUE.format(format_type(t)), expr) elif isinstance(t, UnionType): self.fail(message_registry.TYPE_ALWAYS_TRUE_UNIONTYPE.format(format_expr_type()), expr) + elif isinstance(t, Instance) and t.type.fullname == "typing.Iterable": + _, info = self.make_fake_typeinfo("typing", "Collection", "Collection", []) + self.fail( + message_registry.ITERABLE_ALWAYS_TRUE.format( + format_expr_type(), format_type(Instance(info, t.args)) + ), + expr, + ) else: self.fail(message_registry.TYPE_ALWAYS_TRUE.format(format_expr_type()), expr) diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index f2a74c332b2e..3aee6881067e 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -160,6 +160,12 @@ def __str__(self) -> str: "Warn about function that always evaluate to true in boolean contexts", "General", ) +TRUTHY_ITERABLE: Final[ErrorCode] = ErrorCode( + "truthy-iterable", + "Warn about Iterable expressions that could always evaluate to true in boolean contexts", + "General", + default_enabled=False, +) NAME_MATCH: Final = ErrorCode( "name-match", "Check that type definition has consistent naming", "General" ) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 18acb2cd7a71..219c445497e9 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -153,6 +153,10 @@ def with_additional_msg(self, info: str) -> ErrorMessage: FUNCTION_ALWAYS_TRUE: Final = ErrorMessage( "Function {} could always be true in boolean context", code=codes.TRUTHY_FUNCTION ) +ITERABLE_ALWAYS_TRUE: Final = ErrorMessage( + "{} which can always be true in boolean context. Consider using {} instead.", + code=codes.TRUTHY_ITERABLE, +) NOT_CALLABLE: Final = "{} not callable" TYPE_MUST_BE_USED: Final = "Value of type {} must be used" diff --git a/mypy/semanal.py b/mypy/semanal.py index 77555648ba7e..ce88d033e01c 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -51,7 +51,7 @@ from __future__ import annotations from contextlib import contextmanager -from typing import Any, Callable, Iterable, Iterator, List, TypeVar, cast +from typing import Any, Callable, Collection, Iterable, Iterator, List, TypeVar, cast from typing_extensions import Final, TypeAlias as _TypeAlias from mypy import errorcodes as codes, message_registry @@ -6202,7 +6202,9 @@ def add_plugin_dependency(self, trigger: str, target: str | None = None) -> None target = self.scope.current_target() self.cur_mod_node.plugin_deps.setdefault(trigger, set()).add(target) - def add_type_alias_deps(self, aliases_used: Iterable[str], target: str | None = None) -> None: + def add_type_alias_deps( + self, aliases_used: Collection[str], target: str | None = None + ) -> None: """Add full names of type aliases on which the current node depends. This is used by fine-grained incremental mode to re-check the corresponding nodes. 
diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test index ab947c956b74..090c7ed9f3df 100644 --- a/mypyc/test-data/irbuild-statements.test +++ b/mypyc/test-data/irbuild-statements.test @@ -1006,9 +1006,9 @@ L5: return 1 [case testForZip] -from typing import List, Iterable +from typing import List, Iterable, Sequence -def f(a: List[int], b: Iterable[bool]) -> None: +def f(a: List[int], b: Sequence[bool]) -> None: for x, y in zip(a, b): if b: x = 1 diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 81b8948be14a..798c52629a35 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -857,6 +857,13 @@ if not f: # E: Function "Callable[[], Any]" could always be true in boolean con pass conditional_result = 'foo' if f else 'bar' # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] +[case testTruthyIterable] +# flags: --strict-optional --enable-error-code truthy-iterable +from typing import Iterable +def func(var: Iterable[str]) -> None: + if var: # E: "var" has type "Iterable[str]" which can always be true in boolean context. Consider using "Collection[str]" instead. [truthy-iterable] + ... + [case testNoOverloadImplementation] from typing import overload From 47a435f38d96892b1f6a1fe543b0abe3ccca9c53 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 13 Nov 2022 20:59:44 +0000 Subject: [PATCH 036/292] Delete Python 2 test fixtures (#14083) It looks like these are not used anymore (we will see if tests pass). --- test-data/unit/fixtures/bool_py2.pyi | 16 ----- test-data/unit/fixtures/floatdict_python2.pyi | 68 ------------------- .../unit/fixtures/module_all_python2.pyi | 15 ---- test-data/unit/fixtures/property_py2.pyi | 21 ------ test-data/unit/fixtures/python2.pyi | 38 ----------- 5 files changed, 158 deletions(-) delete mode 100644 test-data/unit/fixtures/bool_py2.pyi delete mode 100644 test-data/unit/fixtures/floatdict_python2.pyi delete mode 100644 test-data/unit/fixtures/module_all_python2.pyi delete mode 100644 test-data/unit/fixtures/property_py2.pyi delete mode 100644 test-data/unit/fixtures/python2.pyi diff --git a/test-data/unit/fixtures/bool_py2.pyi b/test-data/unit/fixtures/bool_py2.pyi deleted file mode 100644 index b2c935132d57..000000000000 --- a/test-data/unit/fixtures/bool_py2.pyi +++ /dev/null @@ -1,16 +0,0 @@ -# builtins stub used in boolean-related test cases. -from typing import Generic, TypeVar -import sys -T = TypeVar('T') - -class object: - def __init__(self) -> None: pass - -class type: pass -class tuple(Generic[T]): pass -class function: pass -class bool: pass -class int: pass -class str: pass -class unicode: pass -class ellipsis: pass diff --git a/test-data/unit/fixtures/floatdict_python2.pyi b/test-data/unit/fixtures/floatdict_python2.pyi deleted file mode 100644 index f177355d5d4b..000000000000 --- a/test-data/unit/fixtures/floatdict_python2.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union - -T = TypeVar('T') -KT = TypeVar('KT') -VT = TypeVar('VT') - -Any = 0 - -class object: - def __init__(self) -> None: pass - -class type: - def __init__(self, x: Any) -> None: pass - -class str: - def __add__(self, other: 'str') -> 'str': pass - def __rmul__(self, n: int) -> str: ... 
- -class unicode: pass - -class tuple(Generic[T]): pass -class slice: pass -class function: pass - -class ellipsis: pass - -class list(Iterable[T], Generic[T]): - @overload - def __init__(self) -> None: pass - @overload - def __init__(self, x: Iterable[T]) -> None: pass - def __iter__(self) -> Iterator[T]: pass - def __add__(self, x: list[T]) -> list[T]: pass - def __mul__(self, x: int) -> list[T]: pass - def __getitem__(self, x: int) -> T: pass - def append(self, x: T) -> None: pass - def extend(self, x: Iterable[T]) -> None: pass - -class dict(Mapping[KT, VT], Generic[KT, VT]): - @overload - def __init__(self, **kwargs: VT) -> None: pass - @overload - def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass - def __setitem__(self, k: KT, v: VT) -> None: pass - def __getitem__(self, k: KT) -> VT: pass - def __iter__(self) -> Iterator[KT]: pass - def update(self, a: Mapping[KT, VT]) -> None: pass - @overload - def get(self, k: KT) -> Optional[VT]: pass - @overload - def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass - - -class int: - def __float__(self) -> float: ... - def __int__(self) -> int: ... - def __mul__(self, x: int) -> int: ... - def __rmul__(self, x: int) -> int: ... - def __truediv__(self, x: int) -> int: ... - def __rtruediv__(self, x: int) -> int: ... - -class float: - def __float__(self) -> float: ... - def __int__(self) -> int: ... - def __mul__(self, x: float) -> float: ... - def __rmul__(self, x: float) -> float: ... - def __truediv__(self, x: float) -> float: ... - def __rtruediv__(self, x: float) -> float: ... diff --git a/test-data/unit/fixtures/module_all_python2.pyi b/test-data/unit/fixtures/module_all_python2.pyi deleted file mode 100644 index 989333c5f41a..000000000000 --- a/test-data/unit/fixtures/module_all_python2.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Generic, Sequence, TypeVar -_T = TypeVar('_T') - -class object: - def __init__(self) -> None: pass -class type: pass -class function: pass -class int: pass -class str: pass -class unicode: pass -class list(Generic[_T], Sequence[_T]): - def append(self, x: _T): pass - def extend(self, x: Sequence[_T]): pass - def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass -class tuple(Generic[_T]): pass diff --git a/test-data/unit/fixtures/property_py2.pyi b/test-data/unit/fixtures/property_py2.pyi deleted file mode 100644 index 3b0ab69cf43f..000000000000 --- a/test-data/unit/fixtures/property_py2.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import typing - -_T = typing.TypeVar('_T') - -class object: - def __init__(self) -> None: pass - -class type: - def __init__(self, x: typing.Any) -> None: pass - -class function: pass - -property = object() # Dummy definition - -class int: pass -class str: pass -class unicode: pass -class bool: pass -class ellipsis: pass - -class tuple(typing.Generic[_T]): pass diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi deleted file mode 100644 index 51af59c8bd45..000000000000 --- a/test-data/unit/fixtures/python2.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from typing import Generic, Iterable, TypeVar, Sequence, Iterator - -class object: - def __init__(self) -> None: pass - def __eq__(self, other: object) -> bool: pass - def __ne__(self, other: object) -> bool: pass - -class type: - def __init__(self, x) -> None: pass - -class function: pass - -class int: pass -class float: pass -class str: - def format(self, *args, **kwars) -> str: ... -class unicode: - def format(self, *args, **kwars) -> unicode: ... 
-class bool(int): pass - -bytes = str - -T = TypeVar('T') -S = TypeVar('S') -class list(Iterable[T], Generic[T]): - def __iter__(self) -> Iterator[T]: pass - def __getitem__(self, item: int) -> T: pass -class tuple(Iterable[T]): - def __iter__(self) -> Iterator[T]: pass -class dict(Generic[T, S]): pass - -class bytearray(Sequence[int]): - def __init__(self, string: str) -> None: pass - def __contains__(self, item: object) -> bool: pass - def __iter__(self) -> Iterator[int]: pass - def __getitem__(self, item: int) -> int: pass - -# Definition of None is implicit From 57ce73d4a39d3293eaac43c7a950e2c1ac30e2c9 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 13 Nov 2022 23:03:24 +0000 Subject: [PATCH 037/292] Support additinal attributes in callback protocols (#14084) Fixes https://github.com/python/mypy/issues/10976 Fixes https://github.com/python/mypy/issues/10403 This is quite straightforward. Note that we will not allow _arbitrary_ attributes on functions, only those that are defined in `types.FunctionType` (or more precisely `builtins.function` that is identical). We have a separate issue for arbitrary attributes https://github.com/python/mypy/issues/2087 --- mypy/checker.py | 7 ++-- mypy/constraints.py | 9 +++-- mypy/messages.py | 27 ++++++++------ mypy/subtypes.py | 25 ++++++++++--- mypy/test/testtypegen.py | 4 +- test-data/unit/check-protocols.test | 47 ++++++++++++++++++++++++ test-data/unit/fine-grained-inspect.test | 4 +- test-data/unit/fixtures/tuple.pyi | 3 +- test-data/unit/lib-stub/builtins.pyi | 3 +- 9 files changed, 100 insertions(+), 29 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index aec2574e0ada..8b1c8d3464fb 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -5883,7 +5883,7 @@ def check_subtype( if ( isinstance(supertype, Instance) and supertype.type.is_protocol - and isinstance(subtype, (Instance, TupleType, TypedDictType)) + and isinstance(subtype, (CallableType, Instance, TupleType, TypedDictType)) ): self.msg.report_protocol_problems(subtype, supertype, context, code=msg.code) if isinstance(supertype, CallableType) and isinstance(subtype, Instance): @@ -5891,10 +5891,11 @@ def check_subtype( if call: self.msg.note_call(subtype, call, context, code=msg.code) if isinstance(subtype, (CallableType, Overloaded)) and isinstance(supertype, Instance): - if supertype.type.is_protocol and supertype.type.protocol_members == ["__call__"]: + if supertype.type.is_protocol and "__call__" in supertype.type.protocol_members: call = find_member("__call__", supertype, subtype, is_operator=True) assert call is not None - self.msg.note_call(supertype, call, context, code=msg.code) + if not is_subtype(subtype, call, options=self.options): + self.msg.note_call(supertype, call, context, code=msg.code) self.check_possible_missing_await(subtype, supertype, context) return False diff --git a/mypy/constraints.py b/mypy/constraints.py index 2a641bf27ed5..7123c590b7ef 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -553,7 +553,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: original_actual = actual = self.actual res: list[Constraint] = [] if isinstance(actual, (CallableType, Overloaded)) and template.type.is_protocol: - if template.type.protocol_members == ["__call__"]: + if "__call__" in template.type.protocol_members: # Special case: a generic callback protocol if not any(template == t for t in template.type.inferring): template.type.inferring.append(template) @@ -565,7 +565,6 @@ def visit_instance(self, template: Instance) -> 
list[Constraint]: subres = infer_constraints(call, actual, self.direction) res.extend(subres) template.type.inferring.pop() - return res if isinstance(actual, CallableType) and actual.fallback is not None: if actual.is_type_obj() and template.type.is_protocol: ret_type = get_proper_type(actual.ret_type) @@ -815,7 +814,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: # because some type may be considered a subtype of a protocol # due to _promote, but still not implement the protocol. not any(template == t for t in reversed(template.type.inferring)) - and mypy.subtypes.is_protocol_implementation(instance, erased) + and mypy.subtypes.is_protocol_implementation(instance, erased, skip=["__call__"]) ): template.type.inferring.append(template) res.extend( @@ -831,7 +830,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: and # We avoid infinite recursion for structural subtypes also here. not any(instance == i for i in reversed(instance.type.inferring)) - and mypy.subtypes.is_protocol_implementation(erased, instance) + and mypy.subtypes.is_protocol_implementation(erased, instance, skip=["__call__"]) ): instance.type.inferring.append(instance) res.extend( @@ -887,6 +886,8 @@ def infer_constraints_from_protocol_members( inst = mypy.subtypes.find_member(member, instance, subtype, class_obj=class_obj) temp = mypy.subtypes.find_member(member, template, subtype) if inst is None or temp is None: + if member == "__call__": + continue return [] # See #11020 # The above is safe since at this point we know that 'instance' is a subtype # of (erased) 'template', therefore it defines all protocol members diff --git a/mypy/messages.py b/mypy/messages.py index e11ee9d0f7f2..75871d9b5521 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1866,6 +1866,7 @@ def report_protocol_problems( class_obj = False is_module = False + skip = [] if isinstance(subtype, TupleType): if not isinstance(subtype.partial_fallback, Instance): return @@ -1880,20 +1881,22 @@ def report_protocol_problems( class_obj = True subtype = subtype.item elif isinstance(subtype, CallableType): - if not subtype.is_type_obj(): - return - ret_type = get_proper_type(subtype.ret_type) - if isinstance(ret_type, TupleType): - ret_type = ret_type.partial_fallback - if not isinstance(ret_type, Instance): - return - class_obj = True - subtype = ret_type + if subtype.is_type_obj(): + ret_type = get_proper_type(subtype.ret_type) + if isinstance(ret_type, TupleType): + ret_type = ret_type.partial_fallback + if not isinstance(ret_type, Instance): + return + class_obj = True + subtype = ret_type + else: + subtype = subtype.fallback + skip = ["__call__"] if subtype.extra_attrs and subtype.extra_attrs.mod_name: is_module = True # Report missing members - missing = get_missing_protocol_members(subtype, supertype) + missing = get_missing_protocol_members(subtype, supertype, skip=skip) if ( missing and len(missing) < len(supertype.type.protocol_members) @@ -2605,13 +2608,15 @@ def variance_string(variance: int) -> str: return "invariant" -def get_missing_protocol_members(left: Instance, right: Instance) -> list[str]: +def get_missing_protocol_members(left: Instance, right: Instance, skip: list[str]) -> list[str]: """Find all protocol members of 'right' that are not implemented (i.e. completely missing) in 'left'. 
""" assert right.type.is_protocol missing: list[str] = [] for member in right.type.protocol_members: + if member in skip: + continue if not find_member(member, left, left): missing.append(member) return missing diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 9a4982f5b8ec..f928e1cc7918 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -678,13 +678,16 @@ def visit_callable_type(self, left: CallableType) -> bool: elif isinstance(right, Overloaded): return all(self._is_subtype(left, item) for item in right.items) elif isinstance(right, Instance): - if right.type.is_protocol and right.type.protocol_members == ["__call__"]: - # OK, a callable can implement a protocol with a single `__call__` member. + if right.type.is_protocol and "__call__" in right.type.protocol_members: + # OK, a callable can implement a protocol with a `__call__` member. # TODO: we should probably explicitly exclude self-types in this case. call = find_member("__call__", right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): - return True + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True if right.type.is_protocol and left.is_type_obj(): ret_type = get_proper_type(left.ret_type) if isinstance(ret_type, TupleType): @@ -792,12 +795,15 @@ def visit_literal_type(self, left: LiteralType) -> bool: def visit_overloaded(self, left: Overloaded) -> bool: right = self.right if isinstance(right, Instance): - if right.type.is_protocol and right.type.protocol_members == ["__call__"]: + if right.type.is_protocol and "__call__" in right.type.protocol_members: # same as for CallableType call = find_member("__call__", right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): - return True + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True return self._is_subtype(left.fallback, right) elif isinstance(right, CallableType): for item in left.items: @@ -938,7 +944,11 @@ def pop_on_exit(stack: list[tuple[T, T]], left: T, right: T) -> Iterator[None]: def is_protocol_implementation( - left: Instance, right: Instance, proper_subtype: bool = False, class_obj: bool = False + left: Instance, + right: Instance, + proper_subtype: bool = False, + class_obj: bool = False, + skip: list[str] | None = None, ) -> bool: """Check whether 'left' implements the protocol 'right'. @@ -958,10 +968,13 @@ def f(self) -> A: ... as well. """ assert right.type.is_protocol + if skip is None: + skip = [] # We need to record this check to generate protocol fine-grained dependencies. 
TypeState.record_protocol_subtype_check(left.type, right.type) # nominal subtyping currently ignores '__init__' and '__new__' signatures members_not_to_check = {"__init__", "__new__"} + members_not_to_check.update(skip) # Trivial check that circumvents the bug described in issue 9771: if left.type.is_protocol: members_right = set(right.type.protocol_members) - members_not_to_check diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py index db155a337980..22ef4272e933 100644 --- a/mypy/test/testtypegen.py +++ b/mypy/test/testtypegen.py @@ -7,7 +7,7 @@ from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource -from mypy.nodes import NameExpr +from mypy.nodes import NameExpr, TempNode from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite @@ -54,6 +54,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: # Filter nodes that should be included in the output. keys = [] for node in nodes: + if isinstance(node, TempNode): + continue if node.line != -1 and map[node]: if ignore_node(node) or node in ignored: continue diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 113b2000fc22..77c14b92b261 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -2642,6 +2642,53 @@ reveal_type([b, a]) # N: Revealed type is "builtins.list[def (x: def (__main__. [builtins fixtures/list.pyi] [out] +[case testCallbackProtocolFunctionAttributesSubtyping] +from typing import Protocol + +class A(Protocol): + __name__: str + def __call__(self) -> str: ... + +class B1(Protocol): + __name__: int + def __call__(self) -> str: ... + +class B2(Protocol): + __name__: str + def __call__(self) -> int: ... + +class B3(Protocol): + __name__: str + extra_stuff: int + def __call__(self) -> str: ... + +def f() -> str: ... + +reveal_type(f.__name__) # N: Revealed type is "builtins.str" +a: A = f # OK +b1: B1 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B1") \ + # N: Following member(s) of "function" have conflicts: \ + # N: __name__: expected "int", got "str" +b2: B2 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B2") \ + # N: "B2.__call__" has type "Callable[[], int]" +b3: B3 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B3") \ + # N: "function" is missing following "B3" protocol member: \ + # N: extra_stuff + +[case testCallbackProtocolFunctionAttributesInference] +from typing import Protocol, TypeVar, Generic, Tuple + +T = TypeVar("T") +S = TypeVar("S", covariant=True) +class A(Protocol[T, S]): + __name__: T + def __call__(self) -> S: ... + +def f() -> int: ... +def test(func: A[T, S]) -> Tuple[T, S]: ... 
+reveal_type(test(f)) # N: Revealed type is "Tuple[builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + [case testProtocolsAlwaysABCs] from typing import Protocol diff --git a/test-data/unit/fine-grained-inspect.test b/test-data/unit/fine-grained-inspect.test index a52db3959633..8574477d8272 100644 --- a/test-data/unit/fine-grained-inspect.test +++ b/test-data/unit/fine-grained-inspect.test @@ -52,8 +52,8 @@ class Meta(type): == {"C": ["meth", "x"]} {"C": ["meth", "x"], "Meta": ["y"], "type": ["__init__"]} -{} -{"object": ["__init__"]} +{"function": ["__name__"]} +{"function": ["__name__"], "object": ["__init__"]} [case testInspectDefBasic] # inspect2: --show=definition foo.py:5:5 diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index 5c69a4ad1eb5..14e668375175 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -23,7 +23,8 @@ class tuple(Sequence[Tco], Generic[Tco]): def __rmul__(self, n: int) -> Tuple[Tco, ...]: pass def __add__(self, x: Tuple[Tco, ...]) -> Tuple[Tco, ...]: pass def count(self, obj: object) -> int: pass -class function: pass +class function: + __name__: str class ellipsis: pass class classmethod: pass diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index 8c4f504fb2e7..82e0f6135614 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -17,7 +17,8 @@ class float: pass class str: pass class bytes: pass -class function: pass +class function: + __name__: str class ellipsis: pass from typing import Generic, Sequence, TypeVar From cf59b82996db6bbe5c6535f338ba1fac22d67975 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 14 Nov 2022 08:43:11 +0000 Subject: [PATCH 038/292] Fix crash on partial type inference within a lambda (#14087) Fixes #9654 Seems to be quite straightforward. Erased types should never be stored on variables, it is just a temporary thing for nested generic calls. --- mypy/checker.py | 15 ++++++++++++--- test-data/unit/check-incremental.test | 26 ++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 3 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 8b1c8d3464fb..c104a75e8cd5 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -178,6 +178,7 @@ AnyType, CallableType, DeletedType, + ErasedType, FunctionLike, Instance, LiteralType, @@ -7040,11 +7041,15 @@ def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: return is_lvalue_final elif isinstance(proper_type, UninhabitedType): return False - return not typ.accept(NothingSeeker()) + return not typ.accept(InvalidInferredTypes()) -class NothingSeeker(TypeQuery[bool]): - """Find any types resulting from failed (ambiguous) type inference.""" +class InvalidInferredTypes(TypeQuery[bool]): + """Find type components that are not valid for an inferred type. + + These include type, and any types resulting from failed + (ambiguous) type inference. + """ def __init__(self) -> None: super().__init__(any) @@ -7052,6 +7057,10 @@ def __init__(self) -> None: def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return t.ambiguous + def visit_erased_type(self, t: ErasedType) -> bool: + # This can happen inside a lambda. 
+ return True + class SetNothingToAny(TypeTranslator): """Replace all ambiguous types with Any (to avoid spurious extra errors).""" diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 3ec0ed2c63f5..131cd039a467 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6286,3 +6286,29 @@ class C: ... [out] [out2] [out3] + +[case testNoCrashOnPartialLambdaInference] +import m +[file m.py] +from typing import TypeVar, Callable + +V = TypeVar("V") +def apply(val: V, func: Callable[[V], None]) -> None: + return func(val) + +xs = [] +apply(0, lambda a: xs.append(a)) +[file m.py.2] +from typing import TypeVar, Callable + +V = TypeVar("V") +def apply(val: V, func: Callable[[V], None]) -> None: + return func(val) + +xs = [] +apply(0, lambda a: xs.append(a)) +reveal_type(xs) +[builtins fixtures/list.pyi] +[out] +[out2] +tmp/m.py:9: note: Revealed type is "builtins.list[builtins.int]" From dd0503e28d9f3dbfa8fa74e5bfdb4ce438646a80 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 14 Nov 2022 11:04:18 +0000 Subject: [PATCH 039/292] Don't consider a branch unreachable if there is possible promotion (#14077) Fixes #14030 FWIW this looks like an acceptable compromise after discussions in the issue. Also it is easy to implement. Let's see what `mypy_primer` will show. --- mypy/checker.py | 31 ++++-- mypy/join.py | 14 ++- mypy/meet.py | 10 +- test-data/unit/check-classes.test | 2 +- test-data/unit/check-isinstance.test | 2 +- test-data/unit/check-type-promotion.test | 133 +++++++++++++++++++++++ test-data/unit/fixtures/primitives.pyi | 8 +- 7 files changed, 180 insertions(+), 20 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index c104a75e8cd5..ea7f46af5adb 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4824,7 +4824,7 @@ def make_fake_typeinfo( return cdef, info def intersect_instances( - self, instances: tuple[Instance, Instance], ctx: Context + self, instances: tuple[Instance, Instance], errors: list[tuple[str, str]] ) -> Instance | None: """Try creating an ad-hoc intersection of the given instances. @@ -4851,6 +4851,17 @@ def intersect_instances( curr_module = self.scope.stack[0] assert isinstance(curr_module, MypyFile) + # First, retry narrowing while allowing promotions (they are disabled by default + # for isinstance() checks, etc). This way we will still type-check branches like + # x: complex = 1 + # if isinstance(x, int): + # ... 
+ left, right = instances + if is_proper_subtype(left, right, ignore_promotions=False): + return left + if is_proper_subtype(right, left, ignore_promotions=False): + return right + def _get_base_classes(instances_: tuple[Instance, Instance]) -> list[Instance]: base_classes_ = [] for inst in instances_: @@ -4891,17 +4902,10 @@ def _make_fake_typeinfo_and_full_name( self.check_multiple_inheritance(info) info.is_intersection = True except MroError: - if self.should_report_unreachable_issues(): - self.msg.impossible_intersection( - pretty_names_list, "inconsistent method resolution order", ctx - ) + errors.append((pretty_names_list, "inconsistent method resolution order")) return None - if local_errors.has_new_errors(): - if self.should_report_unreachable_issues(): - self.msg.impossible_intersection( - pretty_names_list, "incompatible method signatures", ctx - ) + errors.append((pretty_names_list, "incompatible method signatures")) return None curr_module.names[full_name] = SymbolTableNode(GDEF, info) @@ -6355,15 +6359,20 @@ def conditional_types_with_intersection( possible_target_types.append(item) out = [] + errors: list[tuple[str, str]] = [] for v in possible_expr_types: if not isinstance(v, Instance): return yes_type, no_type for t in possible_target_types: - intersection = self.intersect_instances((v, t), ctx) + intersection = self.intersect_instances((v, t), errors) if intersection is None: continue out.append(intersection) if len(out) == 0: + # Only report errors if no element in the union worked. + if self.should_report_unreachable_issues(): + for types, reason in errors: + self.msg.impossible_intersection(types, reason, ctx) return UninhabitedType(), expr_type new_yes_type = make_simplified_union(out) return new_yes_type, expr_type diff --git a/mypy/join.py b/mypy/join.py index d54febd7462a..84aa03f8eeba 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -141,8 +141,11 @@ def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: - """Return a simple least upper bound given the declared type.""" - # TODO: check infinite recursion for aliases here? + """Return a simple least upper bound given the declared type. + + This function should be only used by binder, and should not recurse. + For all other uses, use `join_types()`. + """ declaration = get_proper_type(declaration) s = get_proper_type(s) t = get_proper_type(t) @@ -158,10 +161,10 @@ def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: if isinstance(s, ErasedType): return t - if is_proper_subtype(s, t): + if is_proper_subtype(s, t, ignore_promotions=True): return t - if is_proper_subtype(t, s): + if is_proper_subtype(t, s, ignore_promotions=True): return s if isinstance(declaration, UnionType): @@ -176,6 +179,9 @@ def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: # Meets/joins require callable type normalization. 
s, t = normalize_callables(s, t) + if isinstance(s, UnionType) and not isinstance(t, UnionType): + s, t = t, s + value = t.accept(TypeJoinVisitor(s)) if declaration is None or is_subtype(value, declaration): return value diff --git a/mypy/meet.py b/mypy/meet.py index 3e772419ef3e..f5cd4c1208da 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -124,7 +124,15 @@ def narrow_declared_type(declared: Type, narrowed: Type) -> Type: [ narrow_declared_type(x, narrowed) for x in declared.relevant_items() - if is_overlapping_types(x, narrowed, ignore_promotions=True) + # This (ugly) special-casing is needed to support checking + # branches like this: + # x: Union[float, complex] + # if isinstance(x, int): + # ... + if ( + is_overlapping_types(x, narrowed, ignore_promotions=True) + or is_subtype(narrowed, x, ignore_promotions=False) + ) ] ) if is_enum_overlapping_union(declared, narrowed): diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 42aaa68b5873..33208c081c28 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -7209,7 +7209,7 @@ from typing import Callable class C: x: Callable[[C], int] = lambda x: x.y.g() # E: "C" has no attribute "y" -[case testOpWithInheritedFromAny] +[case testOpWithInheritedFromAny-xfail] from typing import Any C: Any class D(C): diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 046a4fc43537..6eddcd866cab 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -2392,7 +2392,7 @@ class B: x1: Literal[1] = self.f() def t2(self) -> None: - if isinstance(self, (A0, A1)): # E: Subclass of "B" and "A0" cannot exist: would have incompatible method signatures + if isinstance(self, (A0, A1)): reveal_type(self) # N: Revealed type is "__main__.1" x0: Literal[0] = self.f() # E: Incompatible types in assignment (expression has type "Literal[1]", variable has type "Literal[0]") x1: Literal[1] = self.f() diff --git a/test-data/unit/check-type-promotion.test b/test-data/unit/check-type-promotion.test index f477a9f2b390..e66153726e7d 100644 --- a/test-data/unit/check-type-promotion.test +++ b/test-data/unit/check-type-promotion.test @@ -54,3 +54,136 @@ def f(x: Union[SupportsFloat, T]) -> Union[SupportsFloat, T]: pass f(0) # should not crash [builtins fixtures/primitives.pyi] [out] + +[case testIntersectionUsingPromotion1] +# flags: --warn-unreachable +from typing import Union + +x: complex = 1 +reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" +reveal_type(x) # N: Revealed type is "builtins.complex" + +y: Union[int, float] +if isinstance(y, float): + reveal_type(y) # N: Revealed type is "builtins.float" +else: + reveal_type(y) # N: Revealed type is "builtins.int" + +reveal_type(y) # N: Revealed type is "Union[builtins.int, builtins.float]" + +if isinstance(y, int): + reveal_type(y) # N: Revealed type is "builtins.int" +else: + reveal_type(y) # N: Revealed type is "builtins.float" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion2] +# flags: --warn-unreachable +x: complex = 1 +reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, (int, float)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" + +# Note we make type precise, since type promotions are 
involved +reveal_type(x) # N: Revealed type is "Union[builtins.complex, builtins.int, builtins.float]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion3] +# flags: --warn-unreachable +x: object +if isinstance(x, int) and isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.int" +if isinstance(x, complex) and isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion4] +# flags: --warn-unreachable +x: object +if isinstance(x, int): + if isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.int" + else: + reveal_type(x) # N: Revealed type is "builtins.int" +if isinstance(x, complex): + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" + else: + reveal_type(x) # N: Revealed type is "builtins.complex" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion5] +# flags: --warn-unreachable +from typing import Union + +x: Union[float, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion6] +# flags: --warn-unreachable +from typing import Union + +x: Union[str, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.complex]" +reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.int, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion7] +# flags: --warn-unreachable +from typing import Union + +x: Union[int, float, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" + +if isinstance(x, float): + reveal_type(x) # N: Revealed type is "builtins.float" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.complex]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" + +if isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.complex" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion8] +# flags: --warn-unreachable +from typing import Union + +x: Union[int, float, complex] +if isinstance(x, (int, float)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, (int, complex)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.complex]" +else: + reveal_type(x) # N: Revealed type is "builtins.float" +if isinstance(x, (float, complex)): + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" +else: + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi index 9553df4b40c7..90d76b9d76dd 100644 
--- a/test-data/unit/fixtures/primitives.pyi +++ b/test-data/unit/fixtures/primitives.pyi @@ -1,5 +1,5 @@ # builtins stub with non-generic primitive types -from typing import Generic, TypeVar, Sequence, Iterator, Mapping, Iterable, overload +from typing import Generic, TypeVar, Sequence, Iterator, Mapping, Iterable, Tuple, Union T = TypeVar('T') V = TypeVar('V') @@ -20,7 +20,9 @@ class int: def __rmul__(self, x: int) -> int: pass class float: def __float__(self) -> float: pass -class complex: pass + def __add__(self, x: float) -> float: pass +class complex: + def __add__(self, x: complex) -> complex: pass class bool(int): pass class str(Sequence[str]): def __add__(self, s: str) -> str: pass @@ -63,3 +65,5 @@ class range(Sequence[int]): def __getitem__(self, i: int) -> int: pass def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass + +def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass From e0a37fa2d9654fae61799c09b5c84630309f8735 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 14 Nov 2022 16:07:13 -0800 Subject: [PATCH 040/292] Fix crashes with unpacking SyntaxError (#11499) In general, mypy doesn't promise to be able to check the remainder of your code in the presence of syntax errors, so just make this a blocking error. Fixes #9137 Fixes #3825 (most of the reports in this issue were fixed by #8827) Co-authored-by: hauntsaninja <> --- mypy/semanal.py | 5 +++-- test-data/unit/check-tuples.test | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index ce88d033e01c..46ae4b26de3e 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1683,6 +1683,8 @@ class Foo(Bar, Generic[T]): ... declared_tvars: TypeVarLikeList = [] is_protocol = False for i, base_expr in enumerate(base_type_exprs): + if isinstance(base_expr, StarExpr): + base_expr.valid = True self.analyze_type_expr(base_expr) try: @@ -4539,8 +4541,7 @@ def visit_dict_expr(self, expr: DictExpr) -> None: def visit_star_expr(self, expr: StarExpr) -> None: if not expr.valid: - # XXX TODO Change this error message - self.fail("Can use starred expression only as assignment target", expr) + self.fail("Can use starred expression only as assignment target", expr, blocker=True) else: expr.expr.accept(self) diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 061a4bcfa48d..cdb27d10fe0c 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -972,6 +972,17 @@ b = (1, 'x') a = (0, *b, '') [builtins fixtures/tuple.pyi] +[case testUnpackSyntaxError] +*foo # E: Can use starred expression only as assignment target +[builtins fixtures/tuple.pyi] + +[case testUnpackBases] +class A: ... +class B: ... +bases = (A, B) +class C(*bases): ... # E: Invalid base class +[builtins fixtures/tuple.pyi] + [case testTupleMeetTupleAny] from typing import Union, Tuple class A: pass From 77dd4b4df5b8bcd716352144feeb78862427f4dd Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 15 Nov 2022 22:03:36 +0000 Subject: [PATCH 041/292] Add support for Self type (#14041) Ref #12840 Fixes #11871 Fixes #14089 This is an alternative implementation to two existing PRs: https://github.com/python/mypy/pull/11666, https://github.com/python/mypy/pull/13133. This PR treats `typing.Self` as pure syntactic sugar, and transforms it into a type variable early during semantic analyzis. 
This way we can re-use all the existing machinery and handled edge cases for self-types. The only new thing is self-type for _attributes_ (as proposed in the PEP). This required handling in several places, since attribute access is duplicated in several places (see #7724), plus special forms (like NamedTuples and TypedDicts) and dataclasses plugin require additional care, since they use attribute annotations in special ways. I don't copy all the existing tests for "old style" self-types, but only some common use cases, possible error conditions, and relevant new edge cases, such as e.g. special forms mentioned above, and implicit type variable binding for callable types. --- docs/source/error_code_list2.rst | 19 + docs/source/generics.rst | 74 +++- docs/source/more_types.rst | 3 +- mypy/checker.py | 8 +- mypy/checkexpr.py | 5 + mypy/checkmember.py | 27 +- mypy/errorcodes.py | 6 + mypy/expandtype.py | 19 +- mypy/message_registry.py | 1 + mypy/nodes.py | 8 + mypy/plugins/dataclasses.py | 38 +- mypy/semanal.py | 110 ++++- mypy/semanal_namedtuple.py | 2 + mypy/semanal_shared.py | 6 + mypy/semanal_typeddict.py | 2 + mypy/subtypes.py | 4 +- mypy/typeanal.py | 55 ++- mypy/types.py | 18 +- test-data/unit/check-dataclasses.test | 25 ++ test-data/unit/check-incremental.test | 22 + test-data/unit/check-namedtuple.test | 29 ++ test-data/unit/check-protocols.test | 51 +++ test-data/unit/check-selftype.test | 383 +++++++++++++++++- test-data/unit/check-typeddict.test | 11 + test-data/unit/fine-grained.test | 25 ++ test-data/unit/fixtures/typing-namedtuple.pyi | 1 + test-data/unit/fixtures/typing-typeddict.pyi | 1 + test-data/unit/lib-stub/typing.pyi | 1 + 28 files changed, 897 insertions(+), 57 deletions(-) diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index 0a2d8a8c5c5c..0cf96ba9c2e7 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -82,6 +82,25 @@ Example: # Error: Redundant cast to "int" [redundant-cast] return cast(int, x) +Check that methods do not have redundant Self annotations [redundant-self] +-------------------------------------------------------------------------- + +Such annotations are allowed by :pep:`673` but are redundant, so if you want +warnings about them, enable this error code. + +Example: + +.. code-block:: python + + # mypy: enable-error-code="redundant-self" + + from typing import Self + + class C: + # Error: Redundant Self annotation on method first argument + def copy(self: Self) -> Self: + return type(self)() + Check that comparisons are overlapping [comparison-overlap] ----------------------------------------------------------- diff --git a/docs/source/generics.rst b/docs/source/generics.rst index 3ae616f78691..59d4aa1a2dea 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -264,15 +264,8 @@ Generic methods and generic self You can also define generic methods — just use a type variable in the method signature that is different from class type variables. In particular, ``self`` may also be generic, allowing a method to return the most precise -type known at the point of access. - -.. note:: - - This feature is experimental. Checking code with type annotations for self - arguments is still not fully implemented. Mypy may disallow valid code or - allow unsafe code. - -In this way, for example, you can typecheck chaining of setter methods: +type known at the point of access. In this way, for example, you can typecheck +chaining of setter methods: .. 
code-block:: python @@ -333,8 +326,69 @@ or a deserialization method returns the actual type of self. Therefore you may need to silence mypy inside these methods (but not at the call site), possibly by making use of the ``Any`` type. +Note that this feature may accept some unsafe code for the purpose of +*practicality*. For example: + +.. code-block:: python + + from typing import TypeVar + + T = TypeVar("T") + class Base: + def compare(self: T, other: T) -> bool: + return False + + class Sub(Base): + def __init__(self, x: int) -> None: + self.x = x + + # This is unsafe (see below), but allowed because it is + # a common pattern, and rarely causes issues in practice. + def compare(self, other: Sub) -> bool: + return self.x > other.x + + b: Base = Sub(42) + b.compare(Base()) # Runtime error here: 'Base' object has no attribute 'x' + For some advanced uses of self-types see :ref:`additional examples `. +Automatic self types using typing.Self +************************************** + +The patterns described above are quite common, so there is a syntactic sugar +for them introduced in :pep:`673`. Instead of defining a type variable and +using an explicit ``self`` annotation, you can import a magic type ``typing.Self`` +that is automatically transformed into a type variable with an upper bound of +current class, and you don't need an annotation for ``self`` (or ``cls`` for +class methods). The above example can thus be rewritten as: + +.. code-block:: python + + from typing import Self + + class Friend: + other: Self | None = None + + @classmethod + def make_pair(cls) -> tuple[Self, Self]: + a, b = cls(), cls() + a.other = b + b.other = a + return a, b + + class SuperFriend(Friend): + pass + + a, b = SuperFriend.make_pair() + +This is more compact than using explicit type variables, plus additionally +you can use ``Self`` in attribute annotations, not just in methods. + +.. note:: + + To use this feature on versions of Python before 3.11, you will need to + import ``Self`` from ``typing_extensions`` version 4.0 or newer. + .. _variance-of-generics: Variance of generic types @@ -548,7 +602,7 @@ Note that class decorators are handled differently than function decorators in mypy: decorating a class does not erase its type, even if the decorator has incomplete type annotations. -Suppose we have the following decorator, not type annotated yet, +Suppose we have the following decorator, not type annotated yet, that preserves the original function's signature and merely prints the decorated function's name: .. code-block:: python diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index 707411e95fef..722909a038b5 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -804,9 +804,10 @@ classes are generic, self-type allows giving them precise signatures: .. 
code-block:: python T = TypeVar('T') - Q = TypeVar('Q', bound='Base[Any]') class Base(Generic[T]): + Q = TypeVar('Q', bound='Base[T]') + def __init__(self, item: T) -> None: self.item = item diff --git a/mypy/checker.py b/mypy/checker.py index ea7f46af5adb..57725bd9186b 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -39,7 +39,7 @@ from mypy.erasetype import erase_type, erase_typevars, remove_instance_last_known_values from mypy.errorcodes import TYPE_VAR, UNUSED_AWAITABLE, UNUSED_COROUTINE, ErrorCode from mypy.errors import Errors, ErrorWatcher, report_internal_error -from mypy.expandtype import expand_type, expand_type_by_instance +from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance from mypy.join import join_types from mypy.literals import Key, literal, literal_hash from mypy.maptype import map_instance_to_supertype @@ -2488,6 +2488,10 @@ class C(B, A[int]): ... # this is unsafe because... second_sig = self.bind_and_map_method(second, second_type, ctx, base2) ok = is_subtype(first_sig, second_sig, ignore_pos_arg_names=True) elif first_type and second_type: + if isinstance(first.node, Var): + first_type = expand_self_type(first.node, first_type, fill_typevars(ctx)) + if isinstance(second.node, Var): + second_type = expand_self_type(second.node, second_type, fill_typevars(ctx)) ok = is_equivalent(first_type, second_type) if not ok: second_node = base2[name].node @@ -3068,6 +3072,8 @@ def lvalue_type_from_base( if base_var: base_node = base_var.node base_type = base_var.type + if isinstance(base_node, Var) and base_type is not None: + base_type = expand_self_type(base_node, base_type, fill_typevars(expr_node.info)) if isinstance(base_node, Decorator): base_node = base_node.func base_type = base_node.type diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index a271fb876bf3..376e1f811692 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2667,6 +2667,10 @@ def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type if isinstance(base, RefExpr) and isinstance(base.node, MypyFile): module_symbol_table = base.node.names + if isinstance(base, RefExpr) and isinstance(base.node, Var): + is_self = base.node.is_self + else: + is_self = False member_type = analyze_member_access( e.name, @@ -2680,6 +2684,7 @@ def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type chk=self.chk, in_literal_context=self.is_literal_context(), module_symbol_table=module_symbol_table, + is_self=is_self, ) return member_type diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 6c9da4a6ce7c..c81b3fbe4f7e 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -6,7 +6,7 @@ from mypy import meet, message_registry, subtypes from mypy.erasetype import erase_typevars -from mypy.expandtype import expand_type_by_instance, freshen_function_type_vars +from mypy.expandtype import expand_self_type, expand_type_by_instance, freshen_function_type_vars from mypy.maptype import map_instance_to_supertype from mypy.messages import MessageBuilder from mypy.nodes import ( @@ -37,6 +37,7 @@ erase_to_bound, function_type, make_simplified_union, + supported_self_type, tuple_fallback, type_object_type_from_function, ) @@ -90,6 +91,7 @@ def __init__( self_type: Type | None, module_symbol_table: SymbolTable | None = None, no_deferral: bool = False, + is_self: bool = False, ) -> None: self.is_lvalue = is_lvalue self.is_super = is_super @@ -101,6 +103,7 @@ def __init__( self.chk = chk self.module_symbol_table = module_symbol_table self.no_deferral 
= no_deferral + self.is_self = is_self def named_type(self, name: str) -> Instance: return self.chk.named_type(name) @@ -152,6 +155,7 @@ def analyze_member_access( self_type: Type | None = None, module_symbol_table: SymbolTable | None = None, no_deferral: bool = False, + is_self: bool = False, ) -> Type: """Return the type of attribute 'name' of 'typ'. @@ -187,6 +191,7 @@ def analyze_member_access( self_type=self_type, module_symbol_table=module_symbol_table, no_deferral=no_deferral, + is_self=is_self, ) result = _analyze_member_access(name, typ, mx, override_info) possible_literal = get_proper_type(result) @@ -682,12 +687,12 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: return inferred_dunder_get_type.ret_type -def is_instance_var(var: Var, info: TypeInfo) -> bool: +def is_instance_var(var: Var) -> bool: """Return if var is an instance variable according to PEP 526.""" return ( # check the type_info node is the var (not a decorated function, etc.) - var.name in info.names - and info.names[var.name].node is var + var.name in var.info.names + and var.info.names[var.name].node is var and not var.is_classvar # variables without annotations are treated as classvar and not var.is_inferred @@ -722,12 +727,16 @@ def analyze_var( mx.msg.read_only_property(name, itype.type, mx.context) if mx.is_lvalue and var.is_classvar: mx.msg.cant_assign_to_classvar(name, mx.context) + if not (mx.is_self or mx.is_super) or supported_self_type( + get_proper_type(mx.original_type) + ): + typ = expand_self_type(var, typ, mx.original_type) t = get_proper_type(expand_type_by_instance(typ, itype)) result: Type = t typ = get_proper_type(typ) if ( var.is_initialized_in_class - and (not is_instance_var(var, info) or mx.is_operator) + and (not is_instance_var(var) or mx.is_operator) and isinstance(typ, FunctionLike) and not typ.is_type_obj() ): @@ -945,7 +954,12 @@ def analyze_class_attribute_access( # x: T # C.x # Error, ambiguous access # C[int].x # Also an error, since C[int] is same as C at runtime - if isinstance(t, TypeVarType) or has_type_vars(t): + # Exception is Self type wrapped in ClassVar, that is safe. + if node.node.info.self_type is not None and node.node.is_classvar: + exclude = node.node.info.self_type.id + else: + exclude = None + if isinstance(t, TypeVarType) and t.id != exclude or has_type_vars(t, exclude): # Exception: access on Type[...], including first argument of class methods is OK. if not isinstance(get_proper_type(mx.original_type), TypeType) or node.implicit: if node.node.is_classvar: @@ -958,6 +972,7 @@ def analyze_class_attribute_access( # In the above example this means that we infer following types: # C.x -> Any # C[int].x -> int + t = get_proper_type(expand_self_type(node.node, t, itype)) t = erase_typevars(expand_type_by_instance(t, isuper)) is_classmethod = (is_decorated and cast(Decorator, node.node).func.is_class) or ( diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 3aee6881067e..e1efc10b7a8b 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -186,6 +186,12 @@ def __str__(self) -> str: "General", default_enabled=False, ) +REDUNDANT_SELF_TYPE = ErrorCode( + "redundant-self", + "Warn about redundant Self type annotations on method first argument", + "General", + default_enabled=False, +) # Syntax errors are often blocking. 
diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 08bc216689fb..5a56857e1114 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -2,7 +2,7 @@ from typing import Iterable, Mapping, Sequence, TypeVar, cast, overload -from mypy.nodes import ARG_STAR +from mypy.nodes import ARG_STAR, Var from mypy.types import ( AnyType, CallableType, @@ -383,3 +383,20 @@ def expand_unpack_with_variables( raise NotImplementedError(f"Invalid type replacement to expand: {repl}") else: raise NotImplementedError(f"Invalid type to expand: {t.type}") + + +@overload +def expand_self_type(var: Var, typ: ProperType, replacement: ProperType) -> ProperType: + ... + + +@overload +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: + ... + + +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: + """Expand appearances of Self type in a variable type.""" + if var.info.self_type is not None and not var.is_property: + return expand_type(typ, {var.info.self_type.id: replacement}) + return typ diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 219c445497e9..a067763d8d66 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -238,6 +238,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: "variable" ) CLASS_VAR_WITH_TYPEVARS: Final = "ClassVar cannot contain type variables" +CLASS_VAR_WITH_GENERIC_SELF: Final = "ClassVar cannot contain Self type in generic classes" CLASS_VAR_OUTSIDE_OF_CLASS: Final = "ClassVar can only be used for assignments in class body" # Protocol diff --git a/mypy/nodes.py b/mypy/nodes.py index 0ea89611dc1a..7f2fd9a49838 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2813,6 +2813,7 @@ class is generic then it will be a type constructor of higher kind. "has_type_var_tuple_type", "type_var_tuple_prefix", "type_var_tuple_suffix", + "self_type", ) _fullname: Bogus[str] # Fully qualified name @@ -2953,6 +2954,9 @@ class is generic then it will be a type constructor of higher kind. # in case we are doing multiple semantic analysis passes. special_alias: TypeAlias | None + # Shared type variable for typing.Self in this class (if used, otherwise None). 
+ self_type: mypy.types.TypeVarType | None + FLAGS: Final = [ "is_abstract", "is_enum", @@ -3005,6 +3009,7 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None self.is_newtype = False self.is_intersection = False self.metadata = {} + self.self_type = None def add_type_vars(self) -> None: self.has_type_var_tuple_type = False @@ -3222,6 +3227,7 @@ def serialize(self) -> JsonDict: "metadata": self.metadata, "slots": list(sorted(self.slots)) if self.slots is not None else None, "deletable_attributes": self.deletable_attributes, + "self_type": self.self_type.serialize() if self.self_type is not None else None, } return data @@ -3278,6 +3284,8 @@ def deserialize(cls, data: JsonDict) -> TypeInfo: ti.slots = set(data["slots"]) if data["slots"] is not None else None ti.deletable_attributes = data["deletable_attributes"] set_flags(ti, data["flags"]) + st = data["self_type"] + ti.self_type = mypy.types.TypeVarType.deserialize(st) if st is not None else None return ti diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 26bc8ae80fdb..75496d5e56f9 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -2,8 +2,10 @@ from __future__ import annotations +from typing import Optional from typing_extensions import Final +from mypy.expandtype import expand_type from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -50,6 +52,7 @@ TypeVarType, get_proper_type, ) +from mypy.typevars import fill_typevars # The set of decorators that generate dataclasses. dataclass_makers: Final = {"dataclass", "dataclasses.dataclass"} @@ -83,7 +86,7 @@ def __init__( self.info = info self.kw_only = kw_only - def to_argument(self) -> Argument: + def to_argument(self, current_info: TypeInfo) -> Argument: arg_kind = ARG_POS if self.kw_only and self.has_default: arg_kind = ARG_NAMED_OPT @@ -92,11 +95,23 @@ def to_argument(self) -> Argument: elif not self.kw_only and self.has_default: arg_kind = ARG_OPT return Argument( - variable=self.to_var(), type_annotation=self.type, initializer=None, kind=arg_kind + variable=self.to_var(current_info), + type_annotation=self.expand_type(current_info), + initializer=None, + kind=arg_kind, ) - def to_var(self) -> Var: - return Var(self.name, self.type) + def expand_type(self, current_info: TypeInfo) -> Optional[Type]: + if self.type is not None and self.info.self_type is not None: + # In general, it is not safe to call `expand_type()` during semantic analyzis, + # however this plugin is called very late, so all types should be fully ready. + # Also, it is tricky to avoid eager expansion of Self types here (e.g. because + # we serialize attributes). + return expand_type(self.type, {self.info.self_type.id: fill_typevars(current_info)}) + return self.type + + def to_var(self, current_info: TypeInfo) -> Var: + return Var(self.name, self.expand_type(current_info)) def serialize(self) -> JsonDict: assert self.type @@ -175,11 +190,12 @@ def transform(self) -> bool: and attributes ): - args = [ - attr.to_argument() - for attr in attributes - if attr.is_in_init and not self._is_kw_only_type(attr.type) - ] + with state.strict_optional_set(ctx.api.options.strict_optional): + args = [ + attr.to_argument(info) + for attr in attributes + if attr.is_in_init and not self._is_kw_only_type(attr.type) + ] if info.fallback_to_any: # Make positional args optional since we don't know their order. 
@@ -548,7 +564,7 @@ def _freeze(self, attributes: list[DataclassAttribute]) -> None: if isinstance(var, Var): var.is_property = True else: - var = attr.to_var() + var = attr.to_var(info) var.info = info var.is_property = True var._fullname = info.fullname + "." + var.name @@ -567,7 +583,7 @@ def _propertize_callables( info = self._ctx.cls.info for attr in attributes: if isinstance(get_proper_type(attr.type), CallableType): - var = attr.to_var() + var = attr.to_var(info) var.info = info var.is_property = True var.is_settable_property = settable diff --git a/mypy/semanal.py b/mypy/semanal.py index 46ae4b26de3e..b8ffdc98eff5 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -219,12 +219,14 @@ from mypy.semanal_typeddict import TypedDictAnalyzer from mypy.tvar_scope import TypeVarLikeScope from mypy.typeanal import ( + SELF_TYPE_NAMES, TypeAnalyser, TypeVarLikeList, TypeVarLikeQuery, analyze_type_alias, check_for_explicit_any, detect_diverging_alias, + find_self_type, fix_instance_types, has_any_from_unimported_type, no_subscript_builtin_alias, @@ -339,7 +341,7 @@ class SemanticAnalyzer( # Nested block depths of scopes block_depth: list[int] # TypeInfo of directly enclosing class (or None) - type: TypeInfo | None = None + _type: TypeInfo | None = None # Stack of outer classes (the second tuple item contains tvars). type_stack: list[TypeInfo | None] # Type variables bound by the current scope, be it class or function @@ -418,7 +420,7 @@ def __init__( FuncItem | GeneratorExpr | DictionaryComprehension, SymbolTable ] = {} self.imports = set() - self.type = None + self._type = None self.type_stack = [] # Are the namespaces of classes being processed complete? self.incomplete_type_stack: list[bool] = [] @@ -458,6 +460,10 @@ def __init__( # mypyc doesn't properly handle implementing an abstractproperty # with a regular attribute so we make them properties + @property + def type(self) -> TypeInfo | None: + return self._type + @property def is_stub_file(self) -> bool: return self._is_stub_file @@ -771,7 +777,7 @@ def file_context( if active_type: scope.leave_class() self.leave_class() - self.type = None + self._type = None self.incomplete_type_stack.pop() del self.options @@ -812,7 +818,10 @@ def analyze_func_def(self, defn: FuncDef) -> None: if defn.type: assert isinstance(defn.type, CallableType) - self.update_function_type_variables(defn.type, defn) + has_self_type = self.update_function_type_variables(defn.type, defn) + else: + has_self_type = False + self.function_stack.pop() if self.is_class_scope(): @@ -823,7 +832,7 @@ def analyze_func_def(self, defn: FuncDef) -> None: assert isinstance(defn.type, CallableType) if isinstance(get_proper_type(defn.type.ret_type), AnyType): defn.type = defn.type.copy_modified(ret_type=NoneType()) - self.prepare_method_signature(defn, self.type) + self.prepare_method_signature(defn, self.type, has_self_type) # Analyze function signature with self.tvar_scope_frame(self.tvar_scope.method_frame()): @@ -842,6 +851,10 @@ def analyze_func_def(self, defn: FuncDef) -> None: assert isinstance(result, ProperType) if isinstance(result, CallableType): result = self.remove_unpack_kwargs(defn, result) + if has_self_type and self.type is not None: + info = self.type + if info.self_type is not None: + result.variables = [info.self_type] + list(result.variables) defn.type = result self.add_type_alias_deps(analyzer.aliases_used) self.check_function_signature(defn) @@ -914,7 +927,7 @@ def remove_unpack_kwargs(self, defn: FuncDef, typ: CallableType) -> CallableType new_arg_types = 
typ.arg_types[:-1] + [last_type] return typ.copy_modified(arg_types=new_arg_types, unpack_kwargs=True) - def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: + def prepare_method_signature(self, func: FuncDef, info: TypeInfo, has_self_type: bool) -> None: """Check basic signature validity and tweak annotation of self/cls argument.""" # Only non-static methods are special. functype = func.type @@ -926,10 +939,51 @@ def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: elif isinstance(functype, CallableType): self_type = get_proper_type(functype.arg_types[0]) if isinstance(self_type, AnyType): - leading_type: Type = fill_typevars(info) + if has_self_type: + assert self.type is not None and self.type.self_type is not None + leading_type: Type = self.type.self_type + else: + leading_type = fill_typevars(info) if func.is_class or func.name == "__new__": leading_type = self.class_type(leading_type) func.type = replace_implicit_first_type(functype, leading_type) + elif has_self_type and isinstance(func.unanalyzed_type, CallableType): + if not isinstance(get_proper_type(func.unanalyzed_type.arg_types[0]), AnyType): + if self.is_expected_self_type( + self_type, func.is_class or func.name == "__new__" + ): + # This error is off by default, since it is explicitly allowed + # by the PEP 673. + self.fail( + "Redundant Self annotation on method first argument", + func, + code=codes.REDUNDANT_SELF_TYPE, + ) + else: + self.fail( + "Method cannot have explicit self annotation and Self type", func + ) + elif has_self_type: + self.fail("Static methods cannot use Self type", func) + + def is_expected_self_type(self, typ: Type, is_classmethod: bool) -> bool: + """Does this (analyzed or not) type represent the expected Self type for a method?""" + assert self.type is not None + typ = get_proper_type(typ) + if is_classmethod: + if isinstance(typ, TypeType): + return self.is_expected_self_type(typ.item, is_classmethod=False) + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + if sym is not None and sym.fullname == "typing.Type" and typ.args: + return self.is_expected_self_type(typ.args[0], is_classmethod=False) + return False + if isinstance(typ, TypeVarType): + return typ == self.type.self_type + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + return sym is not None and sym.fullname in SELF_TYPE_NAMES + return False def set_original_def(self, previous: Node | None, new: FuncDef | Decorator) -> bool: """If 'new' conditionally redefine 'previous', set 'previous' as original @@ -954,15 +1008,32 @@ def f(): ... # Error: 'f' redefined else: return False - def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> None: + def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> bool: """Make any type variables in the signature of defn explicit. Update the signature of defn to contain type variable definitions - if defn is generic. + if defn is generic. Return True, if the signature contains typing.Self + type, or False otherwise. 
""" with self.tvar_scope_frame(self.tvar_scope.method_frame()): a = self.type_analyzer() - fun_type.variables = a.bind_function_type_variables(fun_type, defn) + fun_type.variables, has_self_type = a.bind_function_type_variables(fun_type, defn) + if has_self_type and self.type is not None: + self.setup_self_type() + return has_self_type + + def setup_self_type(self) -> None: + """Setup a (shared) Self type variable for current class. + + We intentionally don't add it to the class symbol table, + so it can be accessed only by mypy and will not cause + clashes with user defined names. + """ + assert self.type is not None + info = self.type + if info.self_type is not None: + return + info.self_type = TypeVarType("Self", f"{info.fullname}.Self", 0, [], fill_typevars(info)) def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: self.statement = defn @@ -1641,7 +1712,7 @@ def enter_class(self, info: TypeInfo) -> None: self.locals.append(None) # Add class scope self.is_comprehension_stack.append(False) self.block_depth.append(-1) # The class body increments this to 0 - self.type = info + self._type = info self.missing_names.append(set()) def leave_class(self) -> None: @@ -1649,7 +1720,7 @@ def leave_class(self) -> None: self.block_depth.pop() self.locals.pop() self.is_comprehension_stack.pop() - self.type = self.type_stack.pop() + self._type = self.type_stack.pop() self.missing_names.pop() def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None: @@ -4069,6 +4140,12 @@ def check_classvar(self, s: AssignmentStmt) -> None: # See https://github.com/python/mypy/issues/11538 self.fail(message_registry.CLASS_VAR_WITH_TYPEVARS, s) + if ( + analyzed is not None + and self.type.self_type in get_type_vars(analyzed) + and self.type.defn.type_vars + ): + self.fail(message_registry.CLASS_VAR_WITH_GENERIC_SELF, s) elif not isinstance(lvalue, MemberExpr) or self.is_self_member_ref(lvalue): # In case of member access, report error only when assigning to self # Other kinds of member assignments should be already reported @@ -6104,6 +6181,7 @@ def type_analyzer( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, ) -> TypeAnalyser: if tvar_scope is None: tvar_scope = self.tvar_scope @@ -6119,6 +6197,7 @@ def type_analyzer( allow_placeholder=allow_placeholder, allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, + prohibit_self_type=prohibit_self_type, ) tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic()) tpan.global_scope = not self.type and not self.function_stack @@ -6138,6 +6217,7 @@ def anal_type( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, third_pass: bool = False, ) -> Type | None: """Semantically analyze a type. @@ -6160,6 +6240,11 @@ def anal_type( NOTE: The caller shouldn't defer even if this returns None or a placeholder type. 
""" + has_self_type = find_self_type( + typ, lambda name: self.lookup_qualified(name, typ, suppress_errors=True) + ) + if has_self_type and self.type and prohibit_self_type is None: + self.setup_self_type() a = self.type_analyzer( tvar_scope=tvar_scope, allow_unbound_tvars=allow_unbound_tvars, @@ -6168,6 +6253,7 @@ def anal_type( allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, report_invalid_types=report_invalid_types, + prohibit_self_type=prohibit_self_type, ) tag = self.track_incomplete_refs() typ = typ.accept(a) diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 1727c18b6fd9..04308db99e63 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -178,6 +178,7 @@ def check_namedtuple_classdef( stmt.type, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", ) if analyzed is None: # Something is incomplete. We need to defer this named tuple. @@ -445,6 +446,7 @@ def parse_namedtuple_fields_with_types( type, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", ) # Workaround #4987 and avoid introducing a bogus UnboundType if isinstance(analyzed, UnboundType): diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 63f4f5516f79..ee9218f02b3e 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -119,6 +119,11 @@ def is_stub_file(self) -> bool: def is_func_scope(self) -> bool: raise NotImplementedError + @property + @abstractmethod + def type(self) -> TypeInfo | None: + raise NotImplementedError + @trait class SemanticAnalyzerInterface(SemanticAnalyzerCoreInterface): @@ -162,6 +167,7 @@ def anal_type( allow_required: bool = False, allow_placeholder: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, ) -> Type | None: raise NotImplementedError diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index b864c2a30615..e8be82bd41be 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -305,6 +305,7 @@ def analyze_typeddict_classdef_fields( allow_required=True, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", ) if analyzed is None: return None, [], [], set() # Need to defer @@ -500,6 +501,7 @@ def parse_typeddict_fields_with_types( allow_required=True, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", ) if analyzed is None: return None diff --git a/mypy/subtypes.py b/mypy/subtypes.py index f928e1cc7918..7e49c19c42bb 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -8,7 +8,7 @@ import mypy.constraints import mypy.typeops from mypy.erasetype import erase_type -from mypy.expandtype import expand_type_by_instance +from mypy.expandtype import expand_self_type, expand_type_by_instance from mypy.maptype import map_instance_to_supertype # Circular import; done in the function instead. 
@@ -1196,6 +1196,8 @@ def find_node_type( ) else: typ = node.type + if typ is not None: + typ = expand_self_type(node, typ, subtype) p_typ = get_proper_type(typ) if typ is None: return AnyType(TypeOfAny.from_error) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 55d819071a3a..18a63011c5bf 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -89,6 +89,7 @@ get_proper_type, ) from mypy.typetraverser import TypeTraverserVisitor +from mypy.typevars import fill_typevars T = TypeVar("T") @@ -117,6 +118,8 @@ "asyncio.futures.Future", } +SELF_TYPE_NAMES: Final = {"typing.Self", "typing_extensions.Self"} + def analyze_type_alias( node: Expression, @@ -148,6 +151,7 @@ def analyze_type_alias( is_typeshed_stub, defining_alias=True, allow_placeholder=allow_placeholder, + prohibit_self_type="type alias target", ) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope @@ -196,6 +200,7 @@ def __init__( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, ) -> None: self.api = api self.lookup_qualified = api.lookup_qualified @@ -231,6 +236,7 @@ def __init__( self.is_typeshed_stub = is_typeshed_stub # Names of type aliases encountered while analysing a type will be collected here. self.aliases_used: set[str] = set() + self.prohibit_self_type = prohibit_self_type def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) @@ -575,6 +581,24 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ self.fail("Unpack[...] requires exactly one type argument", t) return AnyType(TypeOfAny.from_error) return UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column) + elif fullname in SELF_TYPE_NAMES: + if t.args: + self.fail("Self type cannot have type arguments", t) + if self.prohibit_self_type is not None: + self.fail(f"Self type cannot be used in {self.prohibit_self_type}", t) + return AnyType(TypeOfAny.from_error) + if self.api.type is None: + self.fail("Self type is only allowed in annotations within class definition", t) + return AnyType(TypeOfAny.from_error) + if self.api.type.has_base("builtins.type"): + self.fail("Self type cannot be used in a metaclass", t) + if self.api.type.self_type is not None: + if self.api.type.is_final: + return fill_typevars(self.api.type) + return self.api.type.self_type.copy_modified(line=t.line, column=t.column) + # TODO: verify this is unreachable and replace with an assert? 
+ self.fail("Unexpected Self type", t) + return AnyType(TypeOfAny.from_error) return None def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType: @@ -853,7 +877,7 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: if self.defining_alias: variables = t.variables else: - variables = self.bind_function_type_variables(t, t) + variables, _ = self.bind_function_type_variables(t, t) special = self.anal_type_guard(t.ret_type) arg_kinds = t.arg_kinds if len(arg_kinds) >= 2 and arg_kinds[-2] == ARG_STAR and arg_kinds[-1] == ARG_STAR2: @@ -1347,19 +1371,26 @@ def infer_type_variables(self, type: CallableType) -> list[tuple[str, TypeVarLik def bind_function_type_variables( self, fun_type: CallableType, defn: Context - ) -> Sequence[TypeVarLikeType]: + ) -> tuple[Sequence[TypeVarLikeType], bool]: """Find the type variables of the function type and bind them in our tvar_scope""" + has_self_type = False if fun_type.variables: defs = [] for var in fun_type.variables: + if self.api.type and self.api.type.self_type and var == self.api.type.self_type: + has_self_type = True + continue var_node = self.lookup_qualified(var.name, defn) assert var_node, "Binding for function type variable not found within function" var_expr = var_node.node assert isinstance(var_expr, TypeVarLikeExpr) binding = self.tvar_scope.bind_new(var.name, var_expr) defs.append(binding) - return defs + return defs, has_self_type typevars = self.infer_type_variables(fun_type) + has_self_type = find_self_type( + fun_type, lambda name: self.api.lookup_qualified(name, defn, suppress_errors=True) + ) # Do not define a new type variable if already defined in scope. typevars = [ (name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn) @@ -1375,7 +1406,7 @@ def bind_function_type_variables( binding = self.tvar_scope.bind_new(name, tvar) defs.append(binding) - return defs + return defs, has_self_type def is_defined_type_var(self, tvar: str, context: Context) -> bool: tvar_node = self.lookup_qualified(tvar, context) @@ -1959,3 +1990,19 @@ def visit_instance(self, typ: Instance) -> None: python_version=self.python_version, use_generic_error=True, ) + + +def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool: + return typ.accept(HasSelfType(lookup)) + + +class HasSelfType(TypeQuery[bool]): + def __init__(self, lookup: Callable[[str], SymbolTableNode | None]) -> None: + self.lookup = lookup + super().__init__(any) + + def visit_unbound_type(self, t: UnboundType) -> bool: + sym = self.lookup(t.name) + if sym and sym.fullname in SELF_TYPE_NAMES: + return True + return super().visit_unbound_type(t) diff --git a/mypy/types.py b/mypy/types.py index a73c41904ea7..242d64ee9075 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -402,7 +402,8 @@ class TypeVarId: # For plain variables (type parameters of generic classes and # functions) raw ids are allocated by semantic analysis, using # positive ids 1, 2, ... for generic class parameters and negative - # ids -1, ... for generic function type arguments. This convention + # ids -1, ... for generic function type arguments. A special value 0 + # is reserved for Self type variable (autogenerated). This convention # is only used to keep type variable ids distinct when allocating # them; the type checker makes no distinction between class and # function type variables. 
@@ -522,6 +523,8 @@ def copy_modified( values: Bogus[list[Type]] = _dummy, upper_bound: Bogus[Type] = _dummy, id: Bogus[TypeVarId | int] = _dummy, + line: Bogus[int] = _dummy, + column: Bogus[int] = _dummy, ) -> TypeVarType: return TypeVarType( self.name, @@ -530,8 +533,8 @@ def copy_modified( self.values if values is _dummy else values, self.upper_bound if upper_bound is _dummy else upper_bound, self.variance, - self.line, - self.column, + self.line if line is _dummy else line, + self.column if column is _dummy else column, ) def accept(self, visitor: TypeVisitor[T]) -> T: @@ -3233,11 +3236,12 @@ def replace_alias_tvars( class HasTypeVars(TypeQuery[bool]): - def __init__(self) -> None: + def __init__(self, exclude: TypeVarId | None = None) -> None: super().__init__(any) + self.exclude = exclude def visit_type_var(self, t: TypeVarType) -> bool: - return True + return t.id != self.exclude def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: return True @@ -3246,9 +3250,9 @@ def visit_param_spec(self, t: ParamSpecType) -> bool: return True -def has_type_vars(typ: Type) -> bool: +def has_type_vars(typ: Type, exclude: TypeVarId | None = None) -> bool: """Check if a type contains any type variables (recursively).""" - return typ.accept(HasTypeVars()) + return typ.accept(HasTypeVars(exclude)) class HasRecursiveType(TypeQuery[bool]): diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index d4064124109b..02abe8f1ddc4 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1933,3 +1933,28 @@ B = List[C] class C(CC): ... class CC: ... [builtins fixtures/dataclasses.pyi] + +[case testDataclassSelfType] +# flags: --strict-optional +from dataclasses import dataclass +from typing import Self, TypeVar, Generic, Optional + +T = TypeVar("T") + +@dataclass +class LinkedList(Generic[T]): + value: T + next: Optional[Self] = None + + def meth(self) -> None: + reveal_type(self.next) # N: Revealed type is "Union[Self`0, None]" + +l_int: LinkedList[int] = LinkedList(1, LinkedList("no", None)) # E: Argument 1 to "LinkedList" has incompatible type "str"; expected "int" + +@dataclass +class SubLinkedList(LinkedList[int]): ... + +lst = SubLinkedList(1, LinkedList(2)) # E: Argument 2 to "SubLinkedList" has incompatible type "LinkedList[int]"; expected "Optional[SubLinkedList]" +reveal_type(lst.next) # N: Revealed type is "Union[__main__.SubLinkedList, None]" +reveal_type(SubLinkedList) # N: Revealed type is "def (value: builtins.int, next: Union[__main__.SubLinkedList, None] =) -> __main__.SubLinkedList" +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 131cd039a467..5fca0f55a0d6 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6312,3 +6312,25 @@ reveal_type(xs) [out] [out2] tmp/m.py:9: note: Revealed type is "builtins.list[builtins.int]" + +[case testTypingSelfCoarse] +import m +[file lib.py] +from typing import Self + +class C: + def meth(self, other: Self) -> Self: ... + +[file m.py] +import lib +class D: ... +[file m.py.2] +import lib +class D(lib.C): ... 
+ +reveal_type(D.meth) +reveal_type(D().meth) +[out] +[out2] +tmp/m.py:4: note: Revealed type is "def [Self <: lib.C] (self: Self`0, other: Self`0) -> Self`0" +tmp/m.py:5: note: Revealed type is "def (other: m.D) -> m.D" diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index 438e17a6ba0a..4eda14c2c592 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -1306,3 +1306,32 @@ class C( [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] + +[case testNamedTupleSelfItemNotAllowed] +from typing import Self, NamedTuple, Optional + +class NT(NamedTuple): + val: int + next: Optional[Self] # E: Self type cannot be used in NamedTuple item type +NTC = NamedTuple("NTC", [("val", int), ("next", Optional[Self])]) # E: Self type cannot be used in NamedTuple item type +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] + +[case testNamedTupleTypingSelfMethod] +from typing import Self, NamedTuple, TypeVar, Generic + +T = TypeVar("T") +class NT(NamedTuple, Generic[T]): + key: str + val: T + def meth(self) -> Self: + nt: NT[int] + if bool(): + return nt._replace() # E: Incompatible return value type (got "NT[int]", expected "Self") + else: + return self._replace() + +class SNT(NT[int]): ... +reveal_type(SNT("test", 42).meth()) # N: Revealed type is "Tuple[builtins.str, builtins.int, fallback=__main__.SNT]" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 77c14b92b261..a8d033444806 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -3890,3 +3890,54 @@ def f() -> str: ... [file package/badmod.py] def nothing() -> int: ... [builtins fixtures/module.pyi] + +[case testProtocolSelfTypeNewSyntax] +from typing import Protocol, Self + +class P(Protocol): + @property + def next(self) -> Self: ... + +class C: + next: C +class S: + next: Self + +x: P = C() +y: P = S() + +z: P +reveal_type(S().next) # N: Revealed type is "__main__.S" +reveal_type(z.next) # N: Revealed type is "__main__.P" +[builtins fixtures/property.pyi] + +[case testProtocolSelfTypeNewSyntaxSubProtocol] +from typing import Protocol, Self + +class P(Protocol): + @property + def next(self) -> Self: ... +class PS(P, Protocol): + @property + def other(self) -> Self: ... 
+ +class C: + next: C + other: C +class S: + next: Self + other: Self + +x: PS = C() +y: PS = S() +[builtins fixtures/property.pyi] + +[case testProtocolClassVarSelfType] +from typing import ClassVar, Self, Protocol + +class P(Protocol): + DEFAULT: ClassVar[Self] +class C: + DEFAULT: ClassVar[C] + +x: P = C() diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 3d801d23a642..a7dc41a2ff86 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -531,15 +531,15 @@ reveal_type(B().ft()) # N: Revealed type is "Tuple[builtins.int, builtins.int, [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeMeta] -from typing import Callable, TypeVar, Type +from typing import Callable, TypeVar, Type, ClassVar T = TypeVar('T') class A(type): @property def g(cls: object) -> int: return 0 @property def gt(cls: T) -> T: return cls - f: Callable[[object], int] - ft: Callable[[T], T] + f: ClassVar[Callable[[object], int]] + ft: ClassVar[Callable[[T], T]] class B(A): pass @@ -1353,3 +1353,380 @@ class Test(Generic[T]): a: deque[List[T]] # previously this failed with 'Incompatible types in assignment (expression has type "deque[List[List[T]]]", variable has type "deque[List[T]]")' b: deque[List[T]] = a.copy() + +[case testTypingSelfBasic] +from typing import Self, List + +class C: + attr: List[Self] + def meth(self) -> List[Self]: ... + def test(self) -> Self: + if bool(): + return C() # E: Incompatible return value type (got "C", expected "Self") + else: + return self +class D(C): ... + +reveal_type(C.meth) # N: Revealed type is "def [Self <: __main__.C] (self: Self`0) -> builtins.list[Self`0]" +C.attr # E: Access to generic instance variables via class is ambiguous +reveal_type(D().meth()) # N: Revealed type is "builtins.list[__main__.D]" +reveal_type(D().attr) # N: Revealed type is "builtins.list[__main__.D]" + +[case testTypingSelfInvalidLocations] +from typing import Self, Callable + +var: Self # E: Self type is only allowed in annotations within class definition +reveal_type(var) # N: Revealed type is "Any" + +def foo() -> Self: ... # E: Self type is only allowed in annotations within class definition +reveal_type(foo) # N: Revealed type is "def () -> Any" + +bad: Callable[[Self], Self] # E: Self type is only allowed in annotations within class definition +reveal_type(bad) # N: Revealed type is "def (Any) -> Any" + +def func() -> None: + var: Self # E: Self type is only allowed in annotations within class definition + +class C(Self): ... # E: Self type is only allowed in annotations within class definition + +[case testTypingSelfInvalidArgs] +from typing import Self, List + +class C: + x: Self[int] # E: Self type cannot have type arguments + def meth(self) -> List[Self[int]]: # E: Self type cannot have type arguments + ... + +[case testTypingSelfConflict] +from typing import Self, TypeVar, Tuple + +T = TypeVar("T") +class C: + def meth(self: T) -> Tuple[Self, T]: ... # E: Method cannot have explicit self annotation and Self type +reveal_type(C().meth()) # N: Revealed type is "Tuple[, __main__.C]" +[builtins fixtures/property.pyi] + +[case testTypingSelfProperty] +from typing import Self, Tuple +class C: + @property + def attr(self) -> Tuple[Self, ...]: ... +class D(C): ... 
+ +reveal_type(D().attr) # N: Revealed type is "builtins.tuple[__main__.D, ...]" +[builtins fixtures/property.pyi] + +[case testTypingSelfCallableVar] +from typing import Self, Callable + +class C: + x: Callable[[Self], Self] + def meth(self) -> Callable[[Self], Self]: ... +class D(C): ... + +reveal_type(C().x) # N: Revealed type is "def (__main__.C) -> __main__.C" +reveal_type(D().x) # N: Revealed type is "def (__main__.D) -> __main__.D" +reveal_type(D().meth()) # N: Revealed type is "def (__main__.D) -> __main__.D" + +[case testTypingSelfClassMethod] +from typing import Self + +class C: + @classmethod + def meth(cls) -> Self: ... + @staticmethod + def bad() -> Self: ... # E: Static methods cannot use Self type \ + # E: A function returning TypeVar should receive at least one argument containing the same TypeVar \ + # N: Consider using the upper bound "C" instead + +class D(C): ... +reveal_type(D.meth()) # N: Revealed type is "__main__.D" +reveal_type(D.bad()) # N: Revealed type is "" +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfOverload] +from typing import Self, overload, Union + +class C: + @overload + def foo(self, other: Self) -> Self: ... + @overload + def foo(self, other: int) -> int: ... + def foo(self, other: Union[Self, int]) -> Union[Self, int]: + return other +class D(C): ... +reveal_type(D().foo) # N: Revealed type is "Overload(def (other: __main__.D) -> __main__.D, def (other: builtins.int) -> builtins.int)" + +[case testTypingSelfNestedInAlias] +from typing import Generic, Self, TypeVar, List, Tuple + +T = TypeVar("T") +Pairs = List[Tuple[T, T]] + +class C(Generic[T]): + def pairs(self) -> Pairs[Self]: ... +class D(C[T]): ... +reveal_type(D[int]().pairs()) # N: Revealed type is "builtins.list[Tuple[__main__.D[builtins.int], __main__.D[builtins.int]]]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfOverrideVar] +from typing import Self, TypeVar, Generic + +T = TypeVar("T") +class C(Generic[T]): + x: Self + +class D(C[int]): + x: D +class Bad(C[int]): + x: C[int] # E: Incompatible types in assignment (expression has type "C[int]", base class "C" defined the type as "Bad") + +[case testTypingSelfOverrideVarMulti] +from typing import Self + +class C: + x: Self +class D: + x: C +class E: + x: Good + +class Bad(D, C): # E: Definition of "x" in base class "D" is incompatible with definition in base class "C" + ... +class Good(E, C): + ... + +[case testTypingSelfAlternativeGenericConstructor] +from typing import Self, Generic, TypeVar, Tuple + +T = TypeVar("T") +class C(Generic[T]): + def __init__(self, val: T) -> None: ... + @classmethod + def pair(cls, val: T) -> Tuple[Self, Self]: + return (cls(val), C(val)) # E: Incompatible return value type (got "Tuple[Self, C[T]]", expected "Tuple[Self, Self]") + +class D(C[int]): pass +reveal_type(C.pair(42)) # N: Revealed type is "Tuple[__main__.C[builtins.int], __main__.C[builtins.int]]" +reveal_type(D.pair("no")) # N: Revealed type is "Tuple[__main__.D, __main__.D]" \ + # E: Argument 1 to "pair" of "C" has incompatible type "str"; expected "int" +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfMixedTypeVars] +from typing import Self, TypeVar, Generic, Tuple + +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[T]): + def meth(self, arg: S) -> Tuple[Self, S, T]: ... + +class D(C[int]): ... 
+ +c: C[int] +d: D +reveal_type(c.meth("test")) # N: Revealed type is "Tuple[__main__.C[builtins.int], builtins.str, builtins.int]" +reveal_type(d.meth("test")) # N: Revealed type is "Tuple[__main__.D, builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfRecursiveInit] +from typing import Self + +class C: + def __init__(self, other: Self) -> None: ... +class D(C): ... + +reveal_type(C) # N: Revealed type is "def (other: __main__.C) -> __main__.C" +reveal_type(D) # N: Revealed type is "def (other: __main__.D) -> __main__.D" + +[case testTypingSelfCorrectName] +from typing import Self, List + +class C: + Self = List[C] + def meth(self) -> Self: ... +reveal_type(C.meth) # N: Revealed type is "def (self: __main__.C) -> builtins.list[__main__.C]" + +[case testTypingSelfClassVar] +from typing import Self, ClassVar, Generic, TypeVar + +class C: + DEFAULT: ClassVar[Self] +reveal_type(C.DEFAULT) # N: Revealed type is "__main__.C" + +T = TypeVar("T") +class G(Generic[T]): + BAD: ClassVar[Self] # E: ClassVar cannot contain Self type in generic classes +reveal_type(G.BAD) # N: Revealed type is "__main__.G[Any]" + +[case testTypingSelfMetaClassDisabled] +from typing import Self + +class Meta(type): + def meth(cls) -> Self: ... # E: Self type cannot be used in a metaclass + +[case testTypingSelfNonAnnotationUses] +from typing import Self, List, cast + +class C: + A = List[Self] # E: Self type cannot be used in type alias target + B = cast(Self, ...) + def meth(self) -> A: ... + +class D(C): ... +reveal_type(D().meth()) # N: Revealed type is "builtins.list[Any]" +reveal_type(D().B) # N: Revealed type is "__main__.D" + +[case testTypingSelfInternalSafe] +from typing import Self + +class C: + x: Self + def __init__(self, x: C) -> None: + self.x = x # E: Incompatible types in assignment (expression has type "C", variable has type "Self") + +[case testTypingSelfRedundantAllowed] +from typing import Self, Type + +class C: + def f(self: Self) -> Self: + d: Defer + class Defer: ... + return self + + @classmethod + def g(cls: Type[Self]) -> Self: + d: DeferAgain + class DeferAgain: ... + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfRedundantWarning] +# mypy: enable-error-code="redundant-self" + +from typing import Self, Type + +class C: + def copy(self: Self) -> Self: # E: Redundant Self annotation on method first argument + d: Defer + class Defer: ... + return self + + @classmethod + def g(cls: Type[Self]) -> Self: # E: Redundant Self annotation on method first argument + d: DeferAgain + class DeferAgain: ... + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfAssertType] +from typing import Self, assert_type + +class C: + def foo(self) -> None: + assert_type(self, Self) # E: Expression is of type "C", not "Self" + assert_type(C(), Self) # E: Expression is of type "C", not "Self" + + def bar(self) -> Self: + assert_type(self, Self) # OK + assert_type(C(), Self) # E: Expression is of type "C", not "Self" + return self + +[case testTypingSelfTypeVarClash] +from typing import Self, TypeVar, Tuple + +S = TypeVar("S") +class C: + def bar(self) -> Self: ... + def foo(self, x: S) -> Tuple[Self, S]: ... 
+ +reveal_type(C.foo) # N: Revealed type is "def [Self <: __main__.C, S] (self: Self`0, x: S`-1) -> Tuple[Self`0, S`-1]" +reveal_type(C().foo(42)) # N: Revealed type is "Tuple[__main__.C, builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfTypeVarClashAttr] +from typing import Self, TypeVar, Tuple, Callable + +class Defer(This): ... + +S = TypeVar("S") +class C: + def bar(self) -> Self: ... + foo: Callable[[S, Self], Tuple[Self, S]] + +reveal_type(C().foo) # N: Revealed type is "def [S] (S`-1, __main__.C) -> Tuple[__main__.C, S`-1]" +reveal_type(C().foo(42, C())) # N: Revealed type is "Tuple[__main__.C, builtins.int]" +class This: ... +[builtins fixtures/tuple.pyi] + +[case testTypingSelfAttrOldVsNewStyle] +from typing import Self, TypeVar + +T = TypeVar("T", bound=C) +class C: + x: Self + def foo(self: T) -> T: + return self.x + def bar(self: T) -> T: + self.x = self + return self + def baz(self: Self) -> None: + self.x = self + def bad(self) -> None: + # This is unfortunate, but required by PEP 484 + self.x = self # E: Incompatible types in assignment (expression has type "C", variable has type "Self") + +[case testTypingSelfClashInBodies] +from typing import Self, TypeVar + +T = TypeVar("T") +class C: + def very_bad(self, x: T) -> None: + self.x = x # E: Incompatible types in assignment (expression has type "T", variable has type "Self") + x: Self + def baz(self: Self, x: T) -> None: + y: T = x + +[case testTypingSelfClashUnrelated] +from typing import Self, Generic, TypeVar + +class B: ... + +T = TypeVar("T", bound=B) +class C(Generic[T]): + def __init__(self, val: T) -> None: + self.val = val + def foo(self) -> Self: ... + +def test(x: C[T]) -> T: + reveal_type(x.val) # N: Revealed type is "T`-1" + return x.val + +[case testTypingSelfGenericBound] +from typing import Self, Generic, TypeVar + +T = TypeVar("T") +class C(Generic[T]): + val: T + def foo(self) -> Self: + reveal_type(self.val) # N: Revealed type is "T`1" + return self + +[case testTypingSelfDifferentImport] +import typing as t + +class Foo: + def foo(self) -> t.Self: + return self + @classmethod + def bar(cls) -> t.Self: + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfAllowAliasUseInFinalClasses] +from typing import Self, final + +@final +class C: + def meth(self) -> Self: + return C() # OK for final classes diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 796f2f547528..24521062a5d4 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -2590,3 +2590,14 @@ TD[str](key=0, value=0) # E: Incompatible types (expression has type "int", Typ TD[str]({"key": 0, "value": 0}) # E: Incompatible types (expression has type "int", TypedDict item "value" has type "str") [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + +[case testTypedDictSelfItemNotAllowed] +from typing import Self, TypedDict, Optional + +class TD(TypedDict): + val: int + next: Optional[Self] # E: Self type cannot be used in TypedDict item type +TDC = TypedDict("TDC", {"val": int, "next": Optional[Self]}) # E: Self type cannot be used in TypedDict item type + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 32c4ff2eecf0..2fa3deadd16c 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -10130,3 +10130,28 @@ b.py:2: error: "int" not callable a.py:1: error: Unsupported operand types for + 
("int" and "str") 1 + '' ^~ + +[case testTypingSelfFine] +import m +[file lib.py] +from typing import Any + +class C: + def meth(self, other: Any) -> C: ... +[file lib.py.2] +from typing import Self + +class C: + def meth(self, other: Self) -> Self: ... + +[file n.py] +import lib +class D(lib.C): ... +[file m.py] +from n import D +d = D() +def test() -> None: + d.meth(42) +[out] +== +m.py:4: error: Argument 1 to "meth" of "C" has incompatible type "int"; expected "D" diff --git a/test-data/unit/fixtures/typing-namedtuple.pyi b/test-data/unit/fixtures/typing-namedtuple.pyi index d51134ead599..1a31549463b6 100644 --- a/test-data/unit/fixtures/typing-namedtuple.pyi +++ b/test-data/unit/fixtures/typing-namedtuple.pyi @@ -5,6 +5,7 @@ overload = 0 Type = 0 Literal = 0 Optional = 0 +Self = 0 T_co = TypeVar('T_co', covariant=True) KT = TypeVar('KT') diff --git a/test-data/unit/fixtures/typing-typeddict.pyi b/test-data/unit/fixtures/typing-typeddict.pyi index 378570b4c19c..e398dff3fc6b 100644 --- a/test-data/unit/fixtures/typing-typeddict.pyi +++ b/test-data/unit/fixtures/typing-typeddict.pyi @@ -25,6 +25,7 @@ TypedDict = 0 NoReturn = 0 Required = 0 NotRequired = 0 +Self = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index 23d97704d934..f3850d3936b4 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -27,6 +27,7 @@ NoReturn = 0 Never = 0 NewType = 0 ParamSpec = 0 +Self = 0 TYPE_CHECKING = 0 T = TypeVar('T') From 6cdee7b9c568e966104c05b7a27ffe43083a1310 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 15 Nov 2022 22:54:14 +0000 Subject: [PATCH 042/292] Delete unused symbol node (#14107) IIRC `ImportedName` was used by old semantic analyzer, that is long gone now. --- mypy/nodes.py | 37 ------------------------------------- 1 file changed, 37 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 7f2fd9a49838..d0f55d6ce5a7 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -490,43 +490,6 @@ def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_all(self) -class ImportedName(SymbolNode): - """Indirect reference to a fullname stored in symbol table. - - This node is not present in the original program as such. This is - just a temporary artifact in binding imported names. After semantic - analysis pass 2, these references should be replaced with direct - reference to a real AST node. - - Note that this is neither a Statement nor an Expression so this - can't be visited. - """ - - __slots__ = ("target_fullname",) - - def __init__(self, target_fullname: str) -> None: - super().__init__() - self.target_fullname = target_fullname - - @property - def name(self) -> str: - return self.target_fullname.split(".")[-1] - - @property - def fullname(self) -> str: - return self.target_fullname - - def serialize(self) -> JsonDict: - assert False, "ImportedName leaked from semantic analysis" - - @classmethod - def deserialize(cls, data: JsonDict) -> ImportedName: - assert False, "ImportedName should never be serialized" - - def __str__(self) -> str: - return f"ImportedName({self.target_fullname})" - - FUNCBASE_FLAGS: Final = ["is_property", "is_class", "is_static", "is_final"] From 0d2a954a20f239cde5f958b3e757ee4daab0d8b0 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 15 Nov 2022 22:55:24 +0000 Subject: [PATCH 043/292] Correctly process nested definitions in astmerge (#14104) Fixes #12744 The fix is straightforward. 
Current logic can produce a random mix of old and new nodes if there are functions/methods nested in other statements. --- mypy/server/astmerge.py | 2 +- test-data/unit/fine-grained.test | 50 ++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 7a6b247c84f8..a14335acca7e 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -213,8 +213,8 @@ def visit_mypy_file(self, node: MypyFile) -> None: super().visit_mypy_file(node) def visit_block(self, node: Block) -> None: - super().visit_block(node) node.body = self.replace_statements(node.body) + super().visit_block(node) def visit_func_def(self, node: FuncDef) -> None: node = self.fixup(node) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 2fa3deadd16c..a6d8f206fbba 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -10155,3 +10155,53 @@ def test() -> None: [out] == m.py:4: error: Argument 1 to "meth" of "C" has incompatible type "int"; expected "D" + +[case testNoNestedDefinitionCrash] +import m +[file m.py] +from typing import Any, TYPE_CHECKING + +class C: + if TYPE_CHECKING: + def __init__(self, **kw: Any): ... + +C +[file m.py.2] +from typing import Any, TYPE_CHECKING + +class C: + if TYPE_CHECKING: + def __init__(self, **kw: Any): ... + +C +# change +[builtins fixtures/dict.pyi] +[out] +== + +[case testNoNestedDefinitionCrash2] +import m +[file m.py] +from typing import Any + +class C: + try: + def __init__(self, **kw: Any): ... + except: + pass + +C +[file m.py.2] +from typing import Any + +class C: + try: + def __init__(self, **kw: Any): ... + except: + pass + +C +# change +[builtins fixtures/dict.pyi] +[out] +== From e01359d39978a0b2c132533c00567a4c264116ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrzej=20Bartosi=C5=84ski?= <113755748+neob91-close@users.noreply.github.com> Date: Wed, 16 Nov 2022 00:35:09 +0100 Subject: [PATCH 044/292] Prevent warnings from causing dmypy to fail (#14102) Fixes: #14101 This prevents non-error messages (e.g. warnings) from causing dmypy to return exit code 1. --- mypy/dmypy_server.py | 3 ++- test-data/unit/daemon.test | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index be2f4ab8d618..7227cd559946 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -512,7 +512,8 @@ def initialize_fine_grained( print_memory_profile(run_gc=False) - status = 1 if messages else 0 + __, n_notes, __ = count_stats(messages) + status = 1 if messages and n_notes < len(messages) else 0 messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width) return {"out": "".join(s + "\n" for s in messages), "err": "", "status": status} diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index 56966b2f740c..c72dc3a32bc7 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -214,6 +214,20 @@ mypy-daemon: error: Missing target module, package, files, or command. $ dmypy stop Daemon stopped +[case testDaemonWarningSuccessExitCode-posix] +$ dmypy run -- foo.py --follow-imports=error +Daemon started +foo.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs +Success: no issues found in 1 source file +$ echo $? 
+0 +$ dmypy stop +Daemon stopped +[file foo.py] +def foo(): + a: int = 1 + print(a + "2") + -- this is carefully constructed to be able to break if the quickstart system lets -- something through incorrectly. in particular, the files need to have the same size [case testDaemonQuickstart] From 7d0d1d9d505869d55ee71e370941a8a47122087e Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 16 Nov 2022 00:02:17 +0000 Subject: [PATCH 045/292] Fix crash on nested generic callable (#14093) Fixes #10244 Fixes #13515 This fixes only the crash part, I am going to fix also the embarrassing type variable clash in a separate PR, since it is completely unrelated issue. The crash happens because solver can call `is_suptype()` on the constraint bounds, and those can contain ``. Then if it is a generic callable type (e.g. `def [S] (S) -> T` when used as a context is erased to `def [S] (S) -> `), `is_subtype()` will try unifying them, causing the crash when applying unified arguments. My fix is to simply allow subtyping between callable types that contain ``, we anyway allow checking subtpying between all other types with `` components. And this technically can be useful, e.g. `[T <: DerivedGen1[], T <: DerivedGen2[]]` will be solved as `T <: NonGenBase`. Btw this crash technically has nothing to do with dataclasses, but it looks like there is no other way in mypy to define a callable with generic callable as argument type, if I try: ```python def foo(x: Callable[[S], T]) -> T: ... ``` to repro the crash, mypy instead interprets `foo` as `def [S, T] (x: Callable[[S], T]) -> T`, i.e. the argument type is not generic. I also tried callback protocols, but they also don't repro the crash (at least I can't find a repro), because protocols use variance for subtyping, before actually checking member types. --- mypy/applytype.py | 19 +++++++++++++----- mypy/expandtype.py | 29 ++++++++++++++++++++------- mypy/subtypes.py | 6 +++++- test-data/unit/check-dataclasses.test | 23 +++++++++++++++++++++ 4 files changed, 64 insertions(+), 13 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 1c401664568d..d7f31b36c244 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -73,6 +73,7 @@ def apply_generic_arguments( report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None], context: Context, skip_unsatisfied: bool = False, + allow_erased_callables: bool = False, ) -> CallableType: """Apply generic type arguments to a callable type. @@ -130,18 +131,26 @@ def apply_generic_arguments( + callable.arg_names[star_index + 1 :] ) arg_types = ( - [expand_type(at, id_to_type) for at in callable.arg_types[:star_index]] + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[:star_index] + ] + expanded - + [expand_type(at, id_to_type) for at in callable.arg_types[star_index + 1 :]] + + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[star_index + 1 :] + ] ) else: - arg_types = [expand_type(at, id_to_type) for at in callable.arg_types] + arg_types = [ + expand_type(at, id_to_type, allow_erased_callables) for at in callable.arg_types + ] arg_kinds = callable.arg_kinds arg_names = callable.arg_names # Apply arguments to TypeGuard if any. 
if callable.type_guard is not None: - type_guard = expand_type(callable.type_guard, id_to_type) + type_guard = expand_type(callable.type_guard, id_to_type, allow_erased_callables) else: type_guard = None @@ -150,7 +159,7 @@ def apply_generic_arguments( return callable.copy_modified( arg_types=arg_types, - ret_type=expand_type(callable.ret_type, id_to_type), + ret_type=expand_type(callable.ret_type, id_to_type, allow_erased_callables), variables=remaining_tvars, type_guard=type_guard, arg_kinds=arg_kinds, diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 5a56857e1114..5bee9abc6dc8 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -39,20 +39,26 @@ @overload -def expand_type(typ: ProperType, env: Mapping[TypeVarId, Type]) -> ProperType: +def expand_type( + typ: ProperType, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... +) -> ProperType: ... @overload -def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: +def expand_type( + typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... +) -> Type: ... -def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: +def expand_type( + typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = False +) -> Type: """Substitute any type variable references in a type given by a type environment. """ - return typ.accept(ExpandTypeVisitor(env)) + return typ.accept(ExpandTypeVisitor(env, allow_erased_callables)) @overload @@ -129,8 +135,11 @@ class ExpandTypeVisitor(TypeVisitor[Type]): variables: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value - def __init__(self, variables: Mapping[TypeVarId, Type]) -> None: + def __init__( + self, variables: Mapping[TypeVarId, Type], allow_erased_callables: bool = False + ) -> None: self.variables = variables + self.allow_erased_callables = allow_erased_callables def visit_unbound_type(self, t: UnboundType) -> Type: return t @@ -148,8 +157,14 @@ def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: - # Should not get here. - raise RuntimeError() + if not self.allow_erased_callables: + raise RuntimeError() + # This may happen during type inference if some function argument + # type is a generic callable, and its erased form will appear in inferred + # constraints, then solver may check subtyping between them, which will trigger + # unify_generic_callables(), this is why we can get here. In all other cases it + # is a sign of a bug, since should never appear in any stored types. + return t def visit_instance(self, t: Instance) -> Type: args = self.expand_types_with_unpack(list(t.args)) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 7e49c19c42bb..ce91b08b2e53 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1667,8 +1667,12 @@ def report(*args: Any) -> None: nonlocal had_errors had_errors = True + # This function may be called by the solver, so we need to allow erased types here. + # We anyway allow checking subtyping between other types containing + # (probably also because solver needs subtyping). See also comment in + # ExpandTypeVisitor.visit_erased_type(). 
applied = mypy.applytype.apply_generic_arguments( - type, non_none_inferred_vars, report, context=target + type, non_none_inferred_vars, report, context=target, allow_erased_callables=True ) if had_errors: return None diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 02abe8f1ddc4..d289ec5a8e58 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1958,3 +1958,26 @@ lst = SubLinkedList(1, LinkedList(2)) # E: Argument 2 to "SubLinkedList" has in reveal_type(lst.next) # N: Revealed type is "Union[__main__.SubLinkedList, None]" reveal_type(SubLinkedList) # N: Revealed type is "def (value: builtins.int, next: Union[__main__.SubLinkedList, None] =) -> __main__.SubLinkedList" [builtins fixtures/dataclasses.pyi] + +[case testNoCrashOnNestedGenericCallable] +from dataclasses import dataclass +from typing import Generic, TypeVar, Callable + +T = TypeVar('T') +R = TypeVar('R') +X = TypeVar('X') + +@dataclass +class Box(Generic[T]): + inner: T + +@dataclass +class Cont(Generic[R]): + run: Box[Callable[[X], R]] + +def const_two(x: T) -> str: + return "two" + +c = Cont(Box(const_two)) +reveal_type(c) # N: Revealed type is "__main__.Cont[builtins.str]" +[builtins fixtures/dataclasses.pyi] From 6a7c7cdebdad318845959adfaca93c789b250a2e Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 16 Nov 2022 13:36:20 +0000 Subject: [PATCH 046/292] Allow class variable as implementation for read only attribute (#14081) Fixes #10289 Unless I am missing something, this indeed looks safe, so I am going to allow this. --- mypy/messages.py | 1 + mypy/subtypes.py | 5 ++++- test-data/unit/check-protocols.test | 19 +++++++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/mypy/messages.py b/mypy/messages.py index 75871d9b5521..1e2a5323b512 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2663,6 +2663,7 @@ def get_bad_protocol_flags( if ( IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags + and IS_SETTABLE in superflags or IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags or IS_SETTABLE in superflags diff --git a/mypy/subtypes.py b/mypy/subtypes.py index ce91b08b2e53..2ebecb5d4093 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1040,7 +1040,10 @@ def named_type(fullname: str) -> Instance: if not is_subtype(supertype, subtype): return False if not class_obj: - if (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): + if IS_SETTABLE not in superflags: + if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: + return False + elif (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): return False else: if IS_VAR in superflags and IS_CLASSVAR not in subflags: diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index a8d033444806..35b62defc558 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -1153,6 +1153,25 @@ x2 = y2 # E: Incompatible types in assignment (expression has type "PP", variabl # N: Protocol member P.attr expected settable variable, got read-only attribute [builtins fixtures/property.pyi] +[case testClassVarProtocolImmutable] +from typing import Protocol, ClassVar + +class P(Protocol): + @property + def x(self) -> int: ... 
+ +class C: + x: ClassVar[int] + +class Bad: + x: ClassVar[str] + +x: P = C() +y: P = Bad() # E: Incompatible types in assignment (expression has type "Bad", variable has type "P") \ + # N: Following member(s) of "Bad" have conflicts: \ + # N: x: expected "int", got "str" +[builtins fixtures/property.pyi] + [case testSettablePropertyInProtocols] from typing import Protocol From f84f00a2738db3a541861afb7a0f67fb050095aa Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Wed, 16 Nov 2022 09:16:12 -0800 Subject: [PATCH 047/292] Remove `get_line()` and `get_column()` functions (#14071) When I was working on a different PR for Mypy, I came across these functions: ```python def get_line(self) -> int: """Don't use. Use x.line.""" return self.line def get_column(self) -> int: """Don't use. Use x.column.""" return self.column ``` So I just went ahead and removed them. --- mypy/messages.py | 4 ++-- mypy/nodes.py | 8 -------- mypy/report.py | 2 +- mypy/semanal.py | 4 ++-- mypy/semanal_typeargs.py | 2 +- mypy/stats.py | 2 +- mypy/strconv.py | 2 +- 7 files changed, 8 insertions(+), 16 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 1e2a5323b512..b6e34d38e365 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -231,8 +231,8 @@ def span_from_context(ctx: Context) -> tuple[int, int]: else: origin_span = None self.errors.report( - context.get_line() if context else -1, - context.get_column() if context else -1, + context.line if context else -1, + context.column if context else -1, msg, severity=severity, file=file, diff --git a/mypy/nodes.py b/mypy/nodes.py index d0f55d6ce5a7..cf711c45f587 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -71,14 +71,6 @@ def set_line( if end_column is not None: self.end_column = end_column - def get_line(self) -> int: - """Don't use. Use x.line.""" - return self.line - - def get_column(self) -> int: - """Don't use. 
Use x.column.""" - return self.column - if TYPE_CHECKING: # break import cycle only needed for mypy diff --git a/mypy/report.py b/mypy/report.py index 3fac2234c840..75c372200ca3 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -353,7 +353,7 @@ def indentation_level(self, line_number: int) -> int | None: return None def visit_func_def(self, defn: FuncDef) -> None: - start_line = defn.get_line() - 1 + start_line = defn.line - 1 start_indent = None # When a function is decorated, sometimes the start line will point to # whitespace or comments between the decorator and the function, so diff --git a/mypy/semanal.py b/mypy/semanal.py index b8ffdc98eff5..9b2b4ba44cce 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -6109,12 +6109,12 @@ def fail( return # In case it's a bug and we don't really have context assert ctx is not None, msg - self.errors.report(ctx.get_line(), ctx.get_column(), msg, blocker=blocker, code=code) + self.errors.report(ctx.line, ctx.column, msg, blocker=blocker, code=code) def note(self, msg: str, ctx: Context, code: ErrorCode | None = None) -> None: if not self.in_checked_function(): return - self.errors.report(ctx.get_line(), ctx.get_column(), msg, severity="note", code=code) + self.errors.report(ctx.line, ctx.column, msg, severity="note", code=code) def incomplete_feature_enabled(self, feature: str, ctx: Context) -> bool: if feature not in self.options.enable_incomplete_feature: diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 161775ce8fd9..72903423116f 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -164,4 +164,4 @@ def check_type_var_values( ) def fail(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: - self.errors.report(context.get_line(), context.get_column(), msg, code=code) + self.errors.report(context.line, context.column, msg, code=code) diff --git a/mypy/stats.py b/mypy/stats.py index af6c5fc14a50..b3a32c1ce72c 100644 --- a/mypy/stats.py +++ b/mypy/stats.py @@ -149,7 +149,7 @@ def visit_func_def(self, o: FuncDef) -> None: if o in o.expanded: print( "{}:{}: ERROR: cycle in function expansion; skipping".format( - self.filename, o.get_line() + self.filename, o.line ) ) return diff --git a/mypy/strconv.py b/mypy/strconv.py index 9b369618b88e..f1aa6819e2b7 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -50,7 +50,7 @@ def dump(self, nodes: Sequence[object], obj: mypy.nodes.Context) -> str: number. See mypy.util.dump_tagged for a description of the nodes argument. """ - tag = short_type(obj) + ":" + str(obj.get_line()) + tag = short_type(obj) + ":" + str(obj.line) if self.show_ids: assert self.id_mapper is not None tag += f"<{self.get_id(obj)}>" From 49316f9fb8ccddc3941a1fbe378e4c96e929152f Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 16 Nov 2022 20:20:30 +0000 Subject: [PATCH 048/292] Allow super() for mixin protocols (#14082) Fixes #12344 FWIW this is unsafe (since we don't know where the mixin will appear in the MRO of the actual implementation), but the alternative is having annoying false positives like this issue and e.g. 
https://github.com/python/mypy/issues/4335 --- mypy/checkexpr.py | 14 ++++++++++++-- test-data/unit/check-selftype.test | 20 ++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 376e1f811692..3d2c69073bc0 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4321,8 +4321,18 @@ def visit_super_expr(self, e: SuperExpr) -> Type: mro = e.info.mro index = mro.index(type_info) if index is None: - self.chk.fail(message_registry.SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1, e) - return AnyType(TypeOfAny.from_error) + if ( + instance_info.is_protocol + and instance_info != type_info + and not type_info.is_protocol + ): + # A special case for mixins, in this case super() should point + # directly to the host protocol, this is not safe, since the real MRO + # is not known yet for mixin, but this feature is more like an escape hatch. + index = -1 + else: + self.chk.fail(message_registry.SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1, e) + return AnyType(TypeOfAny.from_error) if len(mro) == index + 1: self.chk.fail(message_registry.TARGET_CLASS_HAS_NO_BASE_CLASS, e) diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index a7dc41a2ff86..072978254049 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -792,6 +792,26 @@ reveal_type(f.copy()) # N: Revealed type is "__main__.File" b.copy() # E: Invalid self argument "Bad" to attribute function "copy" with type "Callable[[T], T]" [builtins fixtures/tuple.pyi] +[case testMixinProtocolSuper] +from typing import Protocol + +class Base(Protocol): + def func(self) -> int: + ... + +class TweakFunc: + def func(self: Base) -> int: + return reveal_type(super().func()) # N: Revealed type is "builtins.int" + +class Good: + def func(self) -> int: ... +class C(TweakFunc, Good): pass +C().func() # OK + +class Bad: + def func(self) -> str: ... +class CC(TweakFunc, Bad): pass # E: Definition of "func" in base class "TweakFunc" is incompatible with definition in base class "Bad" + [case testBadClassLevelDecoratorHack] from typing_extensions import Protocol from typing import TypeVar, Any From 48c4a47c457b03caf00be2e2dacd91c3635cdc90 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 16 Nov 2022 21:39:45 +0000 Subject: [PATCH 049/292] Fix type variable clash in nested positions and in attributes (#14095) Addresses the non-crash part of #10244 (and similar situations). The `freshen_function_type_vars()` use in `checkmember.py` was inconsistent: * It needs to be applied to attributes too, not just methods * It needs to be a visitor, since generic callable can appear in a nested position The downsides are ~2% performance regression, and people will see more large ids in `reveal_type()` (since refreshing functions uses a global unique counter). But since this is a correctness issue that can cause really bizarre error messages, I think it is totally worth it. 
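As a minimal illustration of the attribute case, the following (mirroring the new
`testTypeVariableClashVar` case added to `test-data/unit/check-generics.test` in this
commit) is now accepted; before this change the callable's own type variable was not
freshened on member access and could clash with the caller's type variables:

```python
from typing import Callable, Generic, TypeVar

T = TypeVar("T")
R = TypeVar("R")

class C(Generic[R]):
    # A generic callable nested inside an attribute type.
    x: Callable[[T], R]

def func(x: C[R]) -> R:
    return x.x(42)  # OK after this change
```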
--- mypy/checkmember.py | 41 ++++++++++-------- mypy/expandtype.py | 21 ++++++++++ mypy/typestate.py | 3 +- test-data/unit/check-generics.test | 67 +++++++++++++++++++++++++++++- test-data/unit/check-selftype.test | 2 +- 5 files changed, 113 insertions(+), 21 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index c81b3fbe4f7e..1c38bb4f00dc 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -6,7 +6,11 @@ from mypy import meet, message_registry, subtypes from mypy.erasetype import erase_typevars -from mypy.expandtype import expand_self_type, expand_type_by_instance, freshen_function_type_vars +from mypy.expandtype import ( + expand_self_type, + expand_type_by_instance, + freshen_all_functions_type_vars, +) from mypy.maptype import map_instance_to_supertype from mypy.messages import MessageBuilder from mypy.nodes import ( @@ -66,6 +70,7 @@ get_proper_type, has_type_vars, ) +from mypy.typetraverser import TypeTraverserVisitor if TYPE_CHECKING: # import for forward declaration only import mypy.checker @@ -311,7 +316,7 @@ def analyze_instance_member_access( if mx.is_lvalue: mx.msg.cant_assign_to_method(mx.context) signature = function_type(method, mx.named_type("builtins.function")) - signature = freshen_function_type_vars(signature) + signature = freshen_all_functions_type_vars(signature) if name == "__new__" or method.is_static: # __new__ is special and behaves like a static method -- don't strip # the first argument. @@ -329,7 +334,7 @@ def analyze_instance_member_access( # Since generic static methods should not be allowed. typ = map_instance_to_supertype(typ, method.info) member_type = expand_type_by_instance(signature, typ) - freeze_type_vars(member_type) + freeze_all_type_vars(member_type) return member_type else: # Not a method. @@ -727,11 +732,13 @@ def analyze_var( mx.msg.read_only_property(name, itype.type, mx.context) if mx.is_lvalue and var.is_classvar: mx.msg.cant_assign_to_classvar(name, mx.context) + t = freshen_all_functions_type_vars(typ) if not (mx.is_self or mx.is_super) or supported_self_type( get_proper_type(mx.original_type) ): - typ = expand_self_type(var, typ, mx.original_type) - t = get_proper_type(expand_type_by_instance(typ, itype)) + t = expand_self_type(var, t, mx.original_type) + t = get_proper_type(expand_type_by_instance(t, itype)) + freeze_all_type_vars(t) result: Type = t typ = get_proper_type(typ) if ( @@ -759,13 +766,13 @@ def analyze_var( # In `x.f`, when checking `x` against A1 we assume x is compatible with A # and similarly for B1 when checking against B dispatched_type = meet.meet_types(mx.original_type, itype) - signature = freshen_function_type_vars(functype) + signature = freshen_all_functions_type_vars(functype) signature = check_self_arg( signature, dispatched_type, var.is_classmethod, mx.context, name, mx.msg ) signature = bind_self(signature, mx.self_type, var.is_classmethod) expanded_signature = expand_type_by_instance(signature, itype) - freeze_type_vars(expanded_signature) + freeze_all_type_vars(expanded_signature) if var.is_property: # A property cannot have an overloaded type => the cast is fine. 
assert isinstance(expanded_signature, CallableType) @@ -788,16 +795,14 @@ def analyze_var( return result -def freeze_type_vars(member_type: Type) -> None: - if not isinstance(member_type, ProperType): - return - if isinstance(member_type, CallableType): - for v in member_type.variables: +def freeze_all_type_vars(member_type: Type) -> None: + member_type.accept(FreezeTypeVarsVisitor()) + + +class FreezeTypeVarsVisitor(TypeTraverserVisitor): + def visit_callable_type(self, t: CallableType) -> None: + for v in t.variables: v.id.meta_level = 0 - if isinstance(member_type, Overloaded): - for it in member_type.items: - for v in it.variables: - v.id.meta_level = 0 def lookup_member_var_or_accessor(info: TypeInfo, name: str, is_lvalue: bool) -> SymbolNode | None: @@ -1131,11 +1136,11 @@ class B(A[str]): pass if isinstance(t, CallableType): tvars = original_vars if original_vars is not None else [] if is_classmethod: - t = freshen_function_type_vars(t) + t = freshen_all_functions_type_vars(t) t = bind_self(t, original_type, is_classmethod=True) assert isuper is not None t = cast(CallableType, expand_type_by_instance(t, isuper)) - freeze_type_vars(t) + freeze_all_type_vars(t) return t.copy_modified(variables=list(tvars) + list(t.variables)) elif isinstance(t, Overloaded): return Overloaded( diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 5bee9abc6dc8..70fa62291aa3 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -3,6 +3,7 @@ from typing import Iterable, Mapping, Sequence, TypeVar, cast, overload from mypy.nodes import ARG_STAR, Var +from mypy.type_visitor import TypeTranslator from mypy.types import ( AnyType, CallableType, @@ -130,6 +131,26 @@ def freshen_function_type_vars(callee: F) -> F: return cast(F, fresh_overload) +T = TypeVar("T", bound=Type) + + +def freshen_all_functions_type_vars(t: T) -> T: + result = t.accept(FreshenCallableVisitor()) + assert isinstance(result, type(t)) + return result + + +class FreshenCallableVisitor(TypeTranslator): + def visit_callable_type(self, t: CallableType) -> Type: + result = super().visit_callable_type(t) + assert isinstance(result, ProperType) and isinstance(result, CallableType) + return freshen_function_type_vars(result) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # Same as for ExpandTypeVisitor + return t.copy_modified(args=[arg.accept(self) for arg in t.args]) + + class ExpandTypeVisitor(TypeVisitor[Type]): """Visitor that substitutes type variables with values.""" diff --git a/mypy/typestate.py b/mypy/typestate.py index a5d65c4b4ea3..7398f0d7f524 100644 --- a/mypy/typestate.py +++ b/mypy/typestate.py @@ -10,7 +10,7 @@ from mypy.nodes import TypeInfo from mypy.server.trigger import make_trigger -from mypy.types import Instance, Type, get_proper_type +from mypy.types import Instance, Type, TypeVarId, get_proper_type # Represents that the 'left' instance is a subtype of the 'right' instance SubtypeRelationship: _TypeAlias = Tuple[Instance, Instance] @@ -275,3 +275,4 @@ def reset_global_state() -> None: """ TypeState.reset_all_subtype_caches() TypeState.reset_protocol_deps() + TypeVarId.next_raw_id = 1 diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 7df52b60fc0b..04108dded723 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -1544,7 +1544,7 @@ class C(Generic[T]): reveal_type(C.F(17).foo()) # N: Revealed type is "builtins.int" reveal_type(C("").F(17).foo()) # N: Revealed type is "builtins.int" reveal_type(C.F) # N: Revealed type 
is "def [K] (k: K`1) -> __main__.C.F[K`1]" -reveal_type(C("").F) # N: Revealed type is "def [K] (k: K`1) -> __main__.C.F[K`1]" +reveal_type(C("").F) # N: Revealed type is "def [K] (k: K`6) -> __main__.C.F[K`6]" -- Callable subtyping with generic functions @@ -2580,3 +2580,68 @@ class Bar(Foo[AnyStr]): [out] main:10: error: Argument 1 to "method1" of "Foo" has incompatible type "str"; expected "AnyStr" main:10: error: Argument 2 to "method1" of "Foo" has incompatible type "bytes"; expected "AnyStr" + +[case testTypeVariableClashVar] +from typing import Generic, TypeVar, Callable + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + x: Callable[[T], R] + +def func(x: C[R]) -> R: + return x.x(42) # OK + +[case testTypeVariableClashVarTuple] +from typing import Generic, TypeVar, Callable, Tuple + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + x: Callable[[T], Tuple[R, T]] + +def func(x: C[R]) -> R: + if bool(): + return x.x(42)[0] # OK + else: + return x.x(42)[1] # E: Incompatible return value type (got "int", expected "R") +[builtins fixtures/tuple.pyi] + +[case testTypeVariableClashMethod] +from typing import Generic, TypeVar, Callable + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + def x(self) -> Callable[[T], R]: ... + +def func(x: C[R]) -> R: + return x.x()(42) # OK + +[case testTypeVariableClashMethodTuple] +from typing import Generic, TypeVar, Callable, Tuple + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + def x(self) -> Callable[[T], Tuple[R, T]]: ... + +def func(x: C[R]) -> R: + if bool(): + return x.x()(42)[0] # OK + else: + return x.x()(42)[1] # E: Incompatible return value type (got "int", expected "R") +[builtins fixtures/tuple.pyi] + +[case testTypeVariableClashVarSelf] +from typing import Self, TypeVar, Generic, Callable + +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[T]): + x: Callable[[S], Self] + y: T + +def foo(x: C[T]) -> T: + return x.x(42).y # OK diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 072978254049..7fcac7ed75e9 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1674,7 +1674,7 @@ class C: def bar(self) -> Self: ... foo: Callable[[S, Self], Tuple[Self, S]] -reveal_type(C().foo) # N: Revealed type is "def [S] (S`-1, __main__.C) -> Tuple[__main__.C, S`-1]" +reveal_type(C().foo) # N: Revealed type is "def [S] (S`1, __main__.C) -> Tuple[__main__.C, S`1]" reveal_type(C().foo(42, C())) # N: Revealed type is "Tuple[__main__.C, builtins.int]" class This: ... [builtins fixtures/tuple.pyi] From 885e361b1cf97260d80e9dfa4e494ff362f0edff Mon Sep 17 00:00:00 2001 From: jhance Date: Thu, 17 Nov 2022 11:41:49 -0800 Subject: [PATCH 050/292] Handle prefix/suffix in typevartuple *args support (#14112) This requires handling more cases in the various places that we previously modified to support *args in general. We also need to refresh the formals-to-actuals twice in checkexpr as now it can happen in the infer_function_type_arguments_using_context call. The handling here is kind of asymmetric, because we can convert prefices into positional arguments, but there is no equivalent for suffices, so we represent that as a Tuple[Unpack[...], ] and handle that case separately in some spots. We also support various edge cases like passing in a tuple without any typevartuples involved. 
--- mypy/applytype.py | 83 +++++++++++++---------- mypy/checker.py | 17 +++-- mypy/checkexpr.py | 83 ++++++++++++++++++++--- mypy/constraints.py | 22 ++++++- mypy/expandtype.py | 88 +++++++++++++++++++++---- mypy/typevartuples.py | 20 +++++- test-data/unit/check-typevar-tuple.test | 33 ++++++++++ 7 files changed, 284 insertions(+), 62 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index d7f31b36c244..a81ed3cd1f16 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -4,13 +4,14 @@ import mypy.subtypes from mypy.expandtype import expand_type, expand_unpack_with_variables -from mypy.nodes import ARG_POS, ARG_STAR, Context +from mypy.nodes import ARG_STAR, Context from mypy.types import ( AnyType, CallableType, Parameters, ParamSpecType, PartialType, + TupleType, Type, TypeVarId, TypeVarLikeType, @@ -19,6 +20,7 @@ UnpackType, get_proper_type, ) +from mypy.typevartuples import find_unpack_in_list, replace_starargs def get_target_type( @@ -114,39 +116,57 @@ def apply_generic_arguments( # Apply arguments to argument types. var_arg = callable.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): - expanded = expand_unpack_with_variables(var_arg.typ, id_to_type) - assert isinstance(expanded, list) - # Handle other cases later. - for t in expanded: - assert not isinstance(t, UnpackType) star_index = callable.arg_kinds.index(ARG_STAR) - arg_kinds = ( - callable.arg_kinds[:star_index] - + [ARG_POS] * len(expanded) - + callable.arg_kinds[star_index + 1 :] + callable = callable.copy_modified( + arg_types=( + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[:star_index] + ] + + [callable.arg_types[star_index]] + + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[star_index + 1 :] + ] + ) ) - arg_names = ( - callable.arg_names[:star_index] - + [None] * len(expanded) - + callable.arg_names[star_index + 1 :] - ) - arg_types = ( - [ - expand_type(at, id_to_type, allow_erased_callables) - for at in callable.arg_types[:star_index] - ] - + expanded - + [ - expand_type(at, id_to_type, allow_erased_callables) - for at in callable.arg_types[star_index + 1 :] + + unpacked_type = get_proper_type(var_arg.typ.type) + if isinstance(unpacked_type, TupleType): + # Assuming for now that because we convert prefixes to positional arguments, + # the first argument is always an unpack. + expanded_tuple = expand_type(unpacked_type, id_to_type) + if isinstance(expanded_tuple, TupleType): + # TODO: handle the case where the tuple has an unpack. This will + # hit an assert below. + expanded_unpack = find_unpack_in_list(expanded_tuple.items) + if expanded_unpack is not None: + callable = callable.copy_modified( + arg_types=( + callable.arg_types[:star_index] + + [expanded_tuple] + + callable.arg_types[star_index + 1 :] + ) + ) + else: + callable = replace_starargs(callable, expanded_tuple.items) + else: + # TODO: handle the case for if we get a variable length tuple. 
+ assert False, f"mypy bug: unimplemented case, {expanded_tuple}" + elif isinstance(unpacked_type, TypeVarTupleType): + expanded_tvt = expand_unpack_with_variables(var_arg.typ, id_to_type) + assert isinstance(expanded_tvt, list) + for t in expanded_tvt: + assert not isinstance(t, UnpackType) + callable = replace_starargs(callable, expanded_tvt) + else: + assert False, "mypy bug: unhandled case applying unpack" + else: + callable = callable.copy_modified( + arg_types=[ + expand_type(at, id_to_type, allow_erased_callables) for at in callable.arg_types ] ) - else: - arg_types = [ - expand_type(at, id_to_type, allow_erased_callables) for at in callable.arg_types - ] - arg_kinds = callable.arg_kinds - arg_names = callable.arg_names # Apply arguments to TypeGuard if any. if callable.type_guard is not None: @@ -158,10 +178,7 @@ def apply_generic_arguments( remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type] return callable.copy_modified( - arg_types=arg_types, ret_type=expand_type(callable.ret_type, id_to_type, allow_erased_callables), variables=remaining_tvars, type_guard=type_guard, - arg_kinds=arg_kinds, - arg_names=arg_names, ) diff --git a/mypy/checker.py b/mypy/checker.py index 57725bd9186b..c7de4911501a 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1178,12 +1178,17 @@ def check_func_def( if isinstance(arg_type, ParamSpecType): pass elif isinstance(arg_type, UnpackType): - arg_type = TupleType( - [arg_type], - fallback=self.named_generic_type( - "builtins.tuple", [self.named_type("builtins.object")] - ), - ) + if isinstance(get_proper_type(arg_type.type), TupleType): + # Instead of using Tuple[Unpack[Tuple[...]]], just use + # Tuple[...] + arg_type = arg_type.type + else: + arg_type = TupleType( + [arg_type], + fallback=self.named_generic_type( + "builtins.tuple", [self.named_type("builtins.object")] + ), + ) else: # builtins.tuple[T] is typing.Tuple[T, ...] arg_type = self.named_generic_type("builtins.tuple", [arg_type]) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 3d2c69073bc0..b41a38825fb3 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -150,6 +150,7 @@ TypeVarType, UninhabitedType, UnionType, + UnpackType, flatten_nested_unions, get_proper_type, get_proper_types, @@ -1404,13 +1405,21 @@ def check_callable_call( ) callee = freshen_function_type_vars(callee) callee = self.infer_function_type_arguments_using_context(callee, context) + if need_refresh: + # Argument kinds etc. may have changed due to + # ParamSpec or TypeVarTuple variables being replaced with an arbitrary + # number of arguments; recalculate actual-to-formal map + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee.arg_kinds, + callee.arg_names, + lambda i: self.accept(args[i]), + ) callee = self.infer_function_type_arguments( callee, args, arg_kinds, formal_to_actual, context ) if need_refresh: - # Argument kinds etc. may have changed due to - # ParamSpec variables being replaced with an arbitrary - # number of arguments; recalculate actual-to-formal map formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, @@ -1999,11 +2008,66 @@ def check_argument_types( # Keep track of consumed tuple *arg items. 
mapper = ArgTypeExpander(self.argument_infer_context()) for i, actuals in enumerate(formal_to_actual): - for actual in actuals: - actual_type = arg_types[actual] + orig_callee_arg_type = get_proper_type(callee.arg_types[i]) + + # Checking the case that we have more than one item but the first argument + # is an unpack, so this would be something like: + # [Tuple[Unpack[Ts]], int] + # + # In this case we have to check everything together, we do this by re-unifying + # the suffices to the tuple, e.g. a single actual like + # Tuple[Unpack[Ts], int] + expanded_tuple = False + if len(actuals) > 1: + first_actual_arg_type = get_proper_type(arg_types[actuals[0]]) + if ( + isinstance(first_actual_arg_type, TupleType) + and len(first_actual_arg_type.items) == 1 + and isinstance(get_proper_type(first_actual_arg_type.items[0]), UnpackType) + ): + # TODO: use walrus operator + actual_types = [first_actual_arg_type.items[0]] + [ + arg_types[a] for a in actuals[1:] + ] + actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) + + assert isinstance(orig_callee_arg_type, TupleType) + assert orig_callee_arg_type.items + callee_arg_types = orig_callee_arg_type.items + callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * ( + len(orig_callee_arg_type.items) - 1 + ) + expanded_tuple = True + + if not expanded_tuple: + actual_types = [arg_types[a] for a in actuals] + actual_kinds = [arg_kinds[a] for a in actuals] + if isinstance(orig_callee_arg_type, UnpackType): + unpacked_type = get_proper_type(orig_callee_arg_type.type) + # Only case we know of thus far. + assert isinstance(unpacked_type, TupleType) + actual_types = [arg_types[a] for a in actuals] + actual_kinds = [arg_kinds[a] for a in actuals] + callee_arg_types = unpacked_type.items + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + callee_arg_types = [orig_callee_arg_type] * len(actuals) + callee_arg_kinds = [callee.arg_kinds[i]] * len(actuals) + + assert len(actual_types) == len(actuals) == len(actual_kinds) + + if len(callee_arg_types) != len(actual_types): + # TODO: Improve error message + self.chk.fail("Invalid number of arguments", context) + continue + + assert len(callee_arg_types) == len(actual_types) + assert len(callee_arg_types) == len(callee_arg_kinds) + for actual, actual_type, actual_kind, callee_arg_type, callee_arg_kind in zip( + actuals, actual_types, actual_kinds, callee_arg_types, callee_arg_kinds + ): if actual_type is None: continue # Some kind of error was already reported. - actual_kind = arg_kinds[actual] # Check that a *arg is valid as varargs. 
if actual_kind == nodes.ARG_STAR and not self.is_valid_var_arg(actual_type): self.msg.invalid_var_arg(actual_type, context) @@ -2013,13 +2077,13 @@ def check_argument_types( is_mapping = is_subtype(actual_type, self.chk.named_type("typing.Mapping")) self.msg.invalid_keyword_var_arg(actual_type, is_mapping, context) expanded_actual = mapper.expand_actual_type( - actual_type, actual_kind, callee.arg_names[i], callee.arg_kinds[i] + actual_type, actual_kind, callee.arg_names[i], callee_arg_kind ) check_arg( expanded_actual, actual_type, - arg_kinds[actual], - callee.arg_types[i], + actual_kind, + callee_arg_type, actual + 1, i + 1, callee, @@ -4719,6 +4783,7 @@ def is_valid_var_arg(self, typ: Type) -> bool: ) or isinstance(typ, AnyType) or isinstance(typ, ParamSpecType) + or isinstance(typ, UnpackType) ) def is_valid_keyword_var_arg(self, typ: Type) -> bool: diff --git a/mypy/constraints.py b/mypy/constraints.py index 7123c590b7ef..4e78e5ff1117 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -133,8 +133,26 @@ def infer_constraints_for_callable( ) ) - assert isinstance(unpack_type.type, TypeVarTupleType) - constraints.append(Constraint(unpack_type.type, SUPERTYPE_OF, TypeList(actual_types))) + unpacked_type = get_proper_type(unpack_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + constraints.append(Constraint(unpacked_type, SUPERTYPE_OF, TypeList(actual_types))) + elif isinstance(unpacked_type, TupleType): + # Prefixes get converted to positional args, so technically the only case we + # should have here is like Tuple[Unpack[Ts], Y1, Y2, Y3]. If this turns out + # not to hold we can always handle the prefixes too. + inner_unpack = unpacked_type.items[0] + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + assert isinstance(inner_unpacked_type, TypeVarTupleType) + suffix_len = len(unpacked_type.items) - 1 + constraints.append( + Constraint( + inner_unpacked_type, SUPERTYPE_OF, TypeList(actual_types[:-suffix_len]) + ) + ) + else: + assert False, "mypy bug: unhandled constraint inference case" + else: for actual in actuals: actual_arg_type = arg_types[actual] diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 70fa62291aa3..43f4e6bcd75b 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -2,7 +2,7 @@ from typing import Iterable, Mapping, Sequence, TypeVar, cast, overload -from mypy.nodes import ARG_STAR, Var +from mypy.nodes import ARG_POS, ARG_STAR, Var from mypy.type_visitor import TypeTranslator from mypy.types import ( AnyType, @@ -36,7 +36,11 @@ UnpackType, get_proper_type, ) -from mypy.typevartuples import split_with_instance, split_with_prefix_and_suffix +from mypy.typevartuples import ( + find_unpack_in_list, + split_with_instance, + split_with_prefix_and_suffix, +) @overload @@ -282,21 +286,83 @@ def visit_callable_type(self, t: CallableType) -> Type: var_arg = t.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): - expanded = self.expand_unpack(var_arg.typ) - # Handle other cases later. 
- assert isinstance(expanded, list) - assert len(expanded) == 1 and isinstance(expanded[0], UnpackType) star_index = t.arg_kinds.index(ARG_STAR) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded - + self.expand_types(t.arg_types[star_index + 1 :]) - ) + + # We have something like Unpack[Tuple[X1, X2, Unpack[Ts], Y1, Y2]] + if isinstance(get_proper_type(var_arg.typ.type), TupleType): + expanded_tuple = get_proper_type(var_arg.typ.type.accept(self)) + # TODO: handle the case that expanded_tuple is a variable length tuple. + assert isinstance(expanded_tuple, TupleType) + expanded_unpack_index = find_unpack_in_list(expanded_tuple.items) + # This is the case where we just have Unpack[Tuple[X1, X2, X3]] + # (for example if either the tuple had no unpacks, or the unpack in the + # tuple got fully expanded to something with fixed length) + if expanded_unpack_index is None: + arg_names = ( + t.arg_names[:star_index] + + [None] * len(expanded_tuple.items) + + t.arg_names[star_index + 1 :] + ) + arg_kinds = ( + t.arg_kinds[:star_index] + + [ARG_POS] * len(expanded_tuple.items) + + t.arg_kinds[star_index + 1 :] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_tuple.items + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + else: + # If Unpack[Ts] simplest form still has an unpack or is a + # homogenous tuple, then only the prefix can be represented as + # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] + # as the star arg, for example. + prefix_len = expanded_unpack_index + arg_names = ( + t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] + ) + arg_kinds = ( + t.arg_kinds[:star_index] + + [ARG_POS] * prefix_len + + t.arg_kinds[star_index:] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_tuple.items[:prefix_len] + # Constructing the Unpack containing the tuple without the prefix. + + [ + UnpackType( + expanded_tuple.copy_modified( + items=expanded_tuple.items[prefix_len:] + ) + ) + ] + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + else: + expanded = self.expand_unpack(var_arg.typ) + # Handle other cases later. + assert isinstance(expanded, list) + assert len(expanded) == 1 and isinstance(expanded[0], UnpackType) + + # In this case we keep the arg as ARG_STAR. 
+ arg_names = t.arg_names + arg_kinds = t.arg_kinds + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded + + self.expand_types(t.arg_types[star_index + 1 :]) + ) else: arg_types = self.expand_types(t.arg_types) + arg_names = t.arg_names + arg_kinds = t.arg_kinds return t.copy_modified( arg_types=arg_types, + arg_names=arg_names, + arg_kinds=arg_kinds, ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index e93f99d8a825..4b3b5cc2dca7 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -4,7 +4,8 @@ from typing import Sequence, TypeVar -from mypy.types import Instance, ProperType, Type, UnpackType, get_proper_type +from mypy.nodes import ARG_POS, ARG_STAR +from mypy.types import CallableType, Instance, ProperType, Type, UnpackType, get_proper_type def find_unpack_in_list(items: Sequence[Type]) -> int | None: @@ -150,3 +151,20 @@ def extract_unpack(types: Sequence[Type]) -> ProperType | None: if isinstance(proper_type, UnpackType): return get_proper_type(proper_type.type) return None + + +def replace_starargs(callable: CallableType, types: list[Type]) -> CallableType: + star_index = callable.arg_kinds.index(ARG_STAR) + arg_kinds = ( + callable.arg_kinds[:star_index] + + [ARG_POS] * len(types) + + callable.arg_kinds[star_index + 1 :] + ) + arg_names = ( + callable.arg_names[:star_index] + + [None] * len(types) + + callable.arg_names[star_index + 1 :] + ) + arg_types = callable.arg_types[:star_index] + types + callable.arg_types[star_index + 1 :] + + return callable.copy_modified(arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds) diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index d8f6cde10441..d85990293aea 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -381,4 +381,37 @@ def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: reveal_type(args_to_tuple(1, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]" +def with_prefix_suffix(*args: Unpack[Tuple[bool, str, Unpack[Ts], int]]) -> Tuple[bool, str, Unpack[Ts], int]: + reveal_type(args) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + return args + +reveal_type(with_prefix_suffix(True, "bar", "foo", 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" +reveal_type(with_prefix_suffix(True, "bar", 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.int]" + +with_prefix_suffix(True, "bar", "foo", 1.0) # E: Argument 4 to "with_prefix_suffix" has incompatible type "float"; expected "int" +with_prefix_suffix(True, "bar") # E: Too few arguments for "with_prefix_suffix" + +t = (True, "bar", "foo", 5) +reveal_type(with_prefix_suffix(*t)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.str, builtins.int]" +reveal_type(with_prefix_suffix(True, *("bar", "foo"), 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" + +# TODO: handle list case +#reveal_type(with_prefix_suffix(True, "bar", *["foo1", "foo2"], 5)) + +bad_t = (True, "bar") +with_prefix_suffix(*bad_t) # E: Too few arguments for "with_prefix_suffix" + +def foo(*args: Unpack[Ts]) -> None: + reveal_type(with_prefix_suffix(True, "bar", *args, 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + +def concrete(*args: 
Unpack[Tuple[int, str]]) -> None: + reveal_type(args) # N: Revealed type is "Tuple[builtins.int, builtins.str]" + +concrete(0, "foo") +concrete(0, 1) # E: Argument 2 to "concrete" has incompatible type "int"; expected "Unpack[Tuple[int, str]]" +concrete("foo", "bar") # E: Argument 1 to "concrete" has incompatible type "str"; expected "Unpack[Tuple[int, str]]" +concrete(0, "foo", 1) # E: Invalid number of arguments +concrete(0) # E: Invalid number of arguments +concrete() # E: Invalid number of arguments + [builtins fixtures/tuple.pyi] From 823667d55ca9387e4645ef2bef649cca90b9829f Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 17 Nov 2022 21:11:30 +0000 Subject: [PATCH 051/292] Fix crash during ParamSpec inference (#14118) Fixes #13903 The fix is straightforward, the formal to actual map needs to be refreshed twice, after both using external _and_ internal type context. --- test-data/unit/check-parameter-specification.test | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 329985c4f75b..b13f74bc3729 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1281,3 +1281,18 @@ class Some(Generic[P]): # TODO: this probably should be reported. def call(*args: P.args, **kwargs: P.kwargs): ... [builtins fixtures/paramspec.pyi] + +[case testParamSpecInferenceCrash] +from typing import Callable, Generic, ParamSpec, TypeVar + +def foo(x: int) -> int: ... +T = TypeVar("T") +def bar(x: T) -> T: ... + +P = ParamSpec("P") + +class C(Generic[P]): + def __init__(self, fn: Callable[P, int], *args: P.args, **kwargs: P.kwargs): ... + +reveal_type(bar(C(fn=foo, x=1))) # N: Revealed type is "__main__.C[[x: builtins.int]]" +[builtins fixtures/paramspec.pyi] From 401798fe7f36dc2e664bfae773c9faf5033aa6df Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 17 Nov 2022 22:01:41 +0000 Subject: [PATCH 052/292] Correctly support self types in callable ClassVar (#14115) Fixes #14108 This fixes both new and old style of working with self types. After all I fixed the new style by simply expanding self type, then `bind_self()` does its job, so effect on the instance will be the same. I had two options fixing this, other one (that I didn't go with) is making the callable generic in new style, if it appears in `ClassVar`. This however has two downsides: implementation is tricky, and this adds and edge case to an existing edge case. So instead I choose internal consistency within the new style, rather than similarity between old and new style. 
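With this change the new-style case behaves as expected; the sketch below mirrors the
`testTypingSelfCallableClassVar` case added to `test-data/unit/check-selftype.test`:

```python
from typing import Callable, ClassVar, Self

class C:
    f: ClassVar[Callable[[Self], Self]]

class D(C): ...

reveal_type(D.f)    # def (D) -> D
reveal_type(D().f)  # def () -> D, Self is bound to the instance
```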
--- mypy/checkmember.py | 17 ++++++++++------- mypy/types.py | 9 ++++----- test-data/unit/check-selftype.test | 22 ++++++++++++++++++++++ 3 files changed, 36 insertions(+), 12 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 1c38bb4f00dc..08d4ff412e4e 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -40,6 +40,7 @@ class_callable, erase_to_bound, function_type, + get_type_vars, make_simplified_union, supported_self_type, tuple_fallback, @@ -68,7 +69,6 @@ TypeVarType, UnionType, get_proper_type, - has_type_vars, ) from mypy.typetraverser import TypeTraverserVisitor @@ -767,6 +767,9 @@ def analyze_var( # and similarly for B1 when checking against B dispatched_type = meet.meet_types(mx.original_type, itype) signature = freshen_all_functions_type_vars(functype) + bound = get_proper_type(expand_self_type(var, signature, mx.original_type)) + assert isinstance(bound, FunctionLike) + signature = bound signature = check_self_arg( signature, dispatched_type, var.is_classmethod, mx.context, name, mx.msg ) @@ -960,11 +963,11 @@ def analyze_class_attribute_access( # C.x # Error, ambiguous access # C[int].x # Also an error, since C[int] is same as C at runtime # Exception is Self type wrapped in ClassVar, that is safe. - if node.node.info.self_type is not None and node.node.is_classvar: - exclude = node.node.info.self_type.id - else: - exclude = None - if isinstance(t, TypeVarType) and t.id != exclude or has_type_vars(t, exclude): + def_vars = set(node.node.info.defn.type_vars) + if not node.node.is_classvar and node.node.info.self_type: + def_vars.add(node.node.info.self_type) + typ_vars = set(get_type_vars(t)) + if def_vars & typ_vars: # Exception: access on Type[...], including first argument of class methods is OK. if not isinstance(get_proper_type(mx.original_type), TypeType) or node.implicit: if node.node.is_classvar: @@ -978,7 +981,7 @@ def analyze_class_attribute_access( # C.x -> Any # C[int].x -> int t = get_proper_type(expand_self_type(node.node, t, itype)) - t = erase_typevars(expand_type_by_instance(t, isuper)) + t = erase_typevars(expand_type_by_instance(t, isuper), {tv.id for tv in def_vars}) is_classmethod = (is_decorated and cast(Decorator, node.node).func.is_class) or ( isinstance(node.node, FuncBase) and node.node.is_class diff --git a/mypy/types.py b/mypy/types.py index 242d64ee9075..1de294f9952d 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3236,12 +3236,11 @@ def replace_alias_tvars( class HasTypeVars(TypeQuery[bool]): - def __init__(self, exclude: TypeVarId | None = None) -> None: + def __init__(self) -> None: super().__init__(any) - self.exclude = exclude def visit_type_var(self, t: TypeVarType) -> bool: - return t.id != self.exclude + return True def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: return True @@ -3250,9 +3249,9 @@ def visit_param_spec(self, t: ParamSpecType) -> bool: return True -def has_type_vars(typ: Type, exclude: TypeVarId | None = None) -> bool: +def has_type_vars(typ: Type) -> bool: """Check if a type contains any type variables (recursively).""" - return typ.accept(HasTypeVars(exclude)) + return typ.accept(HasTypeVars()) class HasRecursiveType(TypeQuery[bool]): diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 7fcac7ed75e9..494ae54400fb 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1750,3 +1750,25 @@ from typing import Self, final class C: def meth(self) -> Self: return C() # OK for final classes + +[case 
testTypingSelfCallableClassVar] +from typing import Self, ClassVar, Callable, TypeVar + +class C: + f: ClassVar[Callable[[Self], Self]] +class D(C): ... + +reveal_type(D.f) # N: Revealed type is "def (__main__.D) -> __main__.D" +reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" + +[case testSelfTypeCallableClassVarOldStyle] +from typing import ClassVar, Callable, TypeVar + +T = TypeVar("T") +class C: + f: ClassVar[Callable[[T], T]] + +class D(C): ... + +reveal_type(D.f) # N: Revealed type is "def [T] (T`-1) -> T`-1" +reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" From 6d1bcc1a97b15097fdfda96ae4fb0fad1e2499d7 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 17 Nov 2022 22:17:35 +0000 Subject: [PATCH 053/292] Fix class objects falling back to metaclass for callback protocol (#14121) Fixes #10482 This is not very principled, but should work except people will want to explicitly check some metaclass `__call__`. --- mypy/messages.py | 2 +- mypy/subtypes.py | 4 ++++ test-data/unit/check-protocols.test | 17 +++++++++++++++++ test-data/unit/fixtures/type.pyi | 8 ++++++-- 4 files changed, 28 insertions(+), 3 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index b6e34d38e365..2f487972d647 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1899,7 +1899,7 @@ def report_protocol_problems( missing = get_missing_protocol_members(subtype, supertype, skip=skip) if ( missing - and len(missing) < len(supertype.type.protocol_members) + and (len(missing) < len(supertype.type.protocol_members) or missing == ["__call__"]) and len(missing) <= MAX_ITEMS ): if missing == ["__call__"] and class_obj: diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 2ebecb5d4093..14109587191c 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1005,6 +1005,10 @@ def named_type(fullname: str) -> Instance: subtype: ProperType | None = mypy.checkmember.type_object_type( left.type, named_type ) + elif member == "__call__" and left.type.is_metaclass(): + # Special case: we want to avoid falling back to metaclass __call__ + # if constructor signature didn't match, this can cause many false negatives. + subtype = None else: subtype = get_proper_type(find_member(member, left, left, class_obj=class_obj)) # Useful for debugging: diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 35b62defc558..8c4aef9b5be0 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -3526,6 +3526,23 @@ test(B) # OK test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ # N: "C" has constructor incompatible with "__call__" of "P" +[case testProtocolClassObjectPureCallback] +from typing import Any, ClassVar, Protocol + +class P(Protocol): + def __call__(self, x: int, y: int) -> Any: ... + +class B: + def __init__(self, x: int, y: int) -> None: ... +class C: + def __init__(self, x: int, y: str) -> None: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: "C" has constructor incompatible with "__call__" of "P" +[builtins fixtures/type.pyi] + [case testProtocolTypeTypeAttribute] from typing import ClassVar, Protocol, Type diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi index 77feb41ba70b..33dfb5475efa 100644 --- a/test-data/unit/fixtures/type.pyi +++ b/test-data/unit/fixtures/type.pyi @@ -1,8 +1,9 @@ # builtins stub used in type-related test cases. 
-from typing import Generic, TypeVar, List, Union +from typing import Any, Generic, TypeVar, List, Union -T = TypeVar('T') +T = TypeVar("T") +S = TypeVar("S") class object: def __init__(self) -> None: pass @@ -12,13 +13,16 @@ class list(Generic[T]): pass class type(Generic[T]): __name__: str + def __call__(self, *args: Any, **kwargs: Any) -> Any: pass def __or__(self, other: Union[type, None]) -> type: pass def __ror__(self, other: Union[type, None]) -> type: pass def mro(self) -> List['type']: pass class tuple(Generic[T]): pass +class dict(Generic[T, S]): pass class function: pass class bool: pass class int: pass class str: pass class unicode: pass +class ellipsis: pass From abb5a809e3ec387f46898817c15a2d3137aee819 Mon Sep 17 00:00:00 2001 From: Ilya Konstantinov Date: Fri, 18 Nov 2022 00:32:19 -0500 Subject: [PATCH 054/292] mypy_primer: truncate per-project output (#14091) Closes #14059 --- .github/workflows/mypy_primer_comment.yml | 24 ++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml index 2056fc5a40c0..b20eaf471c9a 100644 --- a/.github/workflows/mypy_primer_comment.yml +++ b/.github/workflows/mypy_primer_comment.yml @@ -48,15 +48,29 @@ jobs: with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | + const MAX_CHARACTERS = 30000 + const MAX_CHARACTERS_PER_PROJECT = MAX_CHARACTERS / 3 + const fs = require('fs') let data = fs.readFileSync('fulldiff.txt', { encoding: 'utf8' }) - // posting comment fails if too long, so truncate - if (data.length > 30000) { - let truncated_data = data.substring(0, 30000) - let lines_truncated = data.split('\n').length - truncated_data.split('\n').length - data = truncated_data + `\n\n... (truncated ${lines_truncated} lines) ...\n` + + function truncateIfNeeded(original, maxLength) { + if (original.length <= maxLength) { + return original + } + let truncated = original.substring(0, maxLength) + // further, remove last line that might be truncated + truncated = truncated.substring(0, truncated.lastIndexOf('\n')) + let lines_truncated = original.split('\n').length - truncated.split('\n').length + return `${truncated}\n\n... 
(truncated ${lines_truncated} lines) ...` } + const projects = data.split('\n\n') + // don't let one project dominate + data = projects.map(project => truncateIfNeeded(project, MAX_CHARACTERS_PER_PROJECT)).join('\n\n') + // posting comment fails if too long, so truncate + data = truncateIfNeeded(data, MAX_CHARACTERS) + console.log("Diff from mypy_primer:") console.log(data) From 6e70826f94a8e9018ad1f3e975570ef1a14f2fc4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 18 Nov 2022 01:01:34 -0800 Subject: [PATCH 055/292] Sync typeshed (#13987) Source commit: https://github.com/python/typeshed/commit/263427f438aa7d3f0bd570f671ecba9299c18968 Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/typeshed/stdlib/_ast.pyi | 4 +- mypy/typeshed/stdlib/_codecs.pyi | 61 ++++++++++-------- mypy/typeshed/stdlib/_curses.pyi | 7 +- mypy/typeshed/stdlib/_decimal.pyi | 4 +- mypy/typeshed/stdlib/_msi.pyi | 10 +-- mypy/typeshed/stdlib/_operator.pyi | 32 +++++++--- mypy/typeshed/stdlib/_posixsubprocess.pyi | 42 +++++++----- mypy/typeshed/stdlib/_socket.pyi | 24 ++++--- mypy/typeshed/stdlib/_tkinter.pyi | 6 +- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 6 +- mypy/typeshed/stdlib/_winapi.pyi | 7 +- mypy/typeshed/stdlib/abc.pyi | 5 +- mypy/typeshed/stdlib/antigravity.pyi | 4 +- mypy/typeshed/stdlib/array.pyi | 14 ++-- mypy/typeshed/stdlib/ast.pyi | 48 ++++++++------ mypy/typeshed/stdlib/asyncio/base_events.pyi | 8 +-- mypy/typeshed/stdlib/asyncio/events.pyi | 8 +-- mypy/typeshed/stdlib/asyncio/sslproto.pyi | 2 +- mypy/typeshed/stdlib/asyncio/streams.pyi | 11 ++-- mypy/typeshed/stdlib/asyncio/subprocess.pyi | 2 +- mypy/typeshed/stdlib/asyncio/transports.pyi | 6 +- mypy/typeshed/stdlib/asyncio/trsock.pyi | 29 +++++---- .../stdlib/asyncio/windows_events.pyi | 10 ++- mypy/typeshed/stdlib/asyncore.pyi | 4 +- mypy/typeshed/stdlib/base64.pyi | 4 +- mypy/typeshed/stdlib/binhex.pyi | 5 +- mypy/typeshed/stdlib/builtins.pyi | 64 +++++++++---------- mypy/typeshed/stdlib/bz2.pyi | 8 +-- mypy/typeshed/stdlib/codecs.pyi | 11 ++-- mypy/typeshed/stdlib/ctypes/__init__.pyi | 8 ++- mypy/typeshed/stdlib/datetime.pyi | 11 +++- mypy/typeshed/stdlib/dbm/__init__.pyi | 2 +- mypy/typeshed/stdlib/dbm/dumb.pyi | 3 + mypy/typeshed/stdlib/dbm/gnu.pyi | 8 +-- mypy/typeshed/stdlib/dbm/ndbm.pyi | 8 +-- mypy/typeshed/stdlib/difflib.pyi | 14 ++-- mypy/typeshed/stdlib/dis.pyi | 15 ++--- mypy/typeshed/stdlib/email/__init__.pyi | 2 +- mypy/typeshed/stdlib/email/base64mime.pyi | 12 ++-- mypy/typeshed/stdlib/email/feedparser.pyi | 2 +- mypy/typeshed/stdlib/email/header.pyi | 7 +- mypy/typeshed/stdlib/email/message.pyi | 2 +- .../stdlib/email/mime/application.pyi | 2 +- mypy/typeshed/stdlib/email/mime/audio.pyi | 2 +- mypy/typeshed/stdlib/email/mime/image.pyi | 2 +- mypy/typeshed/stdlib/email/parser.pyi | 2 +- mypy/typeshed/stdlib/email/quoprimime.pyi | 12 ++-- mypy/typeshed/stdlib/encodings/__init__.pyi | 4 +- mypy/typeshed/stdlib/encodings/utf_8.pyi | 7 +- mypy/typeshed/stdlib/encodings/utf_8_sig.pyi | 7 +- mypy/typeshed/stdlib/fcntl.pyi | 2 +- mypy/typeshed/stdlib/gzip.pyi | 16 ++--- mypy/typeshed/stdlib/hmac.pyi | 12 ++-- mypy/typeshed/stdlib/http/client.pyi | 8 +-- mypy/typeshed/stdlib/http/server.pyi | 25 ++++++-- mypy/typeshed/stdlib/imaplib.pyi | 22 +++---- mypy/typeshed/stdlib/imp.pyi | 9 ++- mypy/typeshed/stdlib/importlib/abc.pyi | 40 +++++++----- mypy/typeshed/stdlib/importlib/machinery.pyi | 23 +++---- 
mypy/typeshed/stdlib/importlib/util.pyi | 6 +- mypy/typeshed/stdlib/inspect.pyi | 2 +- mypy/typeshed/stdlib/io.pyi | 4 +- mypy/typeshed/stdlib/ipaddress.pyi | 4 +- mypy/typeshed/stdlib/json/__init__.pyi | 4 +- mypy/typeshed/stdlib/logging/handlers.pyi | 4 +- mypy/typeshed/stdlib/lzma.pyi | 10 +-- mypy/typeshed/stdlib/mailbox.pyi | 39 +++++------ mypy/typeshed/stdlib/marshal.pyi | 9 +-- mypy/typeshed/stdlib/math.pyi | 25 +++++++- mypy/typeshed/stdlib/mmap.pyi | 7 +- mypy/typeshed/stdlib/msvcrt.pyi | 4 +- .../stdlib/multiprocessing/connection.pyi | 4 +- mypy/typeshed/stdlib/operator.pyi | 1 - mypy/typeshed/stdlib/os/__init__.pyi | 30 ++++++--- mypy/typeshed/stdlib/pathlib.pyi | 16 +++-- mypy/typeshed/stdlib/pickle.pyi | 13 ++-- mypy/typeshed/stdlib/pickletools.pyi | 6 +- mypy/typeshed/stdlib/plistlib.pyi | 14 ++-- mypy/typeshed/stdlib/pyexpat/__init__.pyi | 7 +- mypy/typeshed/stdlib/quopri.pyi | 13 ++-- mypy/typeshed/stdlib/smtplib.pyi | 17 +++-- mypy/typeshed/stdlib/socket.pyi | 30 +++++---- mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 12 ++-- mypy/typeshed/stdlib/ssl.pyi | 46 +++++++------ mypy/typeshed/stdlib/struct.pyi | 4 +- mypy/typeshed/stdlib/tarfile.pyi | 2 +- mypy/typeshed/stdlib/termios.pyi | 42 ++++++++---- mypy/typeshed/stdlib/tkinter/commondialog.pyi | 4 +- mypy/typeshed/stdlib/tkinter/dialog.pyi | 2 +- mypy/typeshed/stdlib/tkinter/dnd.pyi | 2 +- mypy/typeshed/stdlib/tkinter/scrolledtext.pyi | 3 +- mypy/typeshed/stdlib/tokenize.pyi | 4 +- mypy/typeshed/stdlib/types.pyi | 4 +- mypy/typeshed/stdlib/typing.pyi | 17 ++--- mypy/typeshed/stdlib/unicodedata.pyi | 5 +- mypy/typeshed/stdlib/unittest/case.pyi | 16 ++--- mypy/typeshed/stdlib/unittest/mock.pyi | 9 +-- mypy/typeshed/stdlib/urllib/parse.pyi | 27 ++++---- mypy/typeshed/stdlib/urllib/response.pyi | 6 +- mypy/typeshed/stdlib/weakref.pyi | 9 ++- mypy/typeshed/stdlib/winsound.pyi | 5 +- mypy/typeshed/stdlib/xml/__init__.pyi | 2 +- mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi | 14 ++-- mypy/typeshed/stdlib/xml/dom/minidom.pyi | 8 +-- .../stdlib/xml/etree/ElementInclude.pyi | 3 +- .../typeshed/stdlib/xml/etree/ElementTree.pyi | 24 +++---- mypy/typeshed/stdlib/xml/parsers/__init__.pyi | 2 +- mypy/typeshed/stdlib/xml/sax/__init__.pyi | 6 +- mypy/typeshed/stdlib/xml/sax/xmlreader.pyi | 2 +- mypy/typeshed/stdlib/zipfile.pyi | 10 ++- mypy/typeshed/stdlib/zipimport.pyi | 9 ++- mypy/typeshed/stdlib/zlib.pyi | 26 ++++---- mypy/typeshed/stdlib/zoneinfo/__init__.pyi | 5 +- 113 files changed, 759 insertions(+), 575 deletions(-) diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi index b7d081f6acb2..f723b7eff8bb 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -329,7 +329,7 @@ class JoinedStr(expr): if sys.version_info < (3, 8): class Num(expr): # Deprecated in 3.8; use Constant - n: complex + n: int | float | complex class Str(expr): # Deprecated in 3.8; use Constant s: str @@ -349,7 +349,7 @@ class Constant(expr): kind: str | None # Aliases for value, for backwards compatibility s: Any - n: complex + n: int | float | complex if sys.version_info >= (3, 8): class NamedExpr(expr): diff --git a/mypy/typeshed/stdlib/_codecs.pyi b/mypy/typeshed/stdlib/_codecs.pyi index 9241ac6a7038..232256fbf614 100644 --- a/mypy/typeshed/stdlib/_codecs.pyi +++ b/mypy/typeshed/stdlib/_codecs.pyi @@ -1,5 +1,6 @@ import codecs import sys +from _typeshed import ReadableBuffer from collections.abc import Callable from typing import overload from typing_extensions import Literal, TypeAlias @@ -44,13 +45,13 @@ 
_BytesToBytesEncoding: TypeAlias = Literal[ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] @overload -def encode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... @overload def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] @overload def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... @overload -def decode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] @overload def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... @@ -64,66 +65,72 @@ def decode( @overload def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... @overload -def decode(obj: bytes, encoding: str = ..., errors: str = ...) -> str: ... +def decode(obj: ReadableBuffer, encoding: str = ..., errors: str = ...) -> str: ... def lookup(__encoding: str) -> codecs.CodecInfo: ... def charmap_build(__map: str) -> _CharMap: ... -def ascii_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def ascii_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def charmap_decode(__data: bytes, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ... +def charmap_decode(__data: ReadableBuffer, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ... def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[bytes, int]: ... -def escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def escape_encode(__data: bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... -def latin_1_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def latin_1_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def latin_1_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): - def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def raw_unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + ) -> tuple[str, int]: ... else: - def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def readbuffer_encode(__data: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... +def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): - def unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + ) -> tuple[str, int]: ... 
else: - def unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.version_info < (3, 8): - def unicode_internal_decode(__obj: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... - def unicode_internal_encode(__obj: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... + def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... + def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_16_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... def utf_16_ex_decode( - __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... ) -> tuple[str, int, int]: ... -def utf_16_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_16_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_32_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... def utf_32_ex_decode( - __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... ) -> tuple[str, int, int]: ... -def utf_32_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_32_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_7_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_7_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_7_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_8_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) 
-> tuple[str, int]: ... +def utf_8_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_8_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.platform == "win32": - def mbcs_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def mbcs_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def mbcs_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... - def code_page_decode(__codepage: int, __data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def code_page_decode( + __codepage: int, __data: ReadableBuffer, __errors: str | None = ..., __final: int = ... + ) -> tuple[str, int]: ... def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... - def oem_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def oem_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def oem_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi index adb1ea84e45b..7053e85f7b7f 100644 --- a/mypy/typeshed/stdlib/_curses.pyi +++ b/mypy/typeshed/stdlib/_curses.pyi @@ -1,9 +1,10 @@ import sys -from _typeshed import SupportsRead +from _typeshed import ReadOnlyBuffer, SupportsRead from typing import IO, Any, NamedTuple, overload from typing_extensions import TypeAlias, final if sys.platform != "win32": + # Handled by PyCurses_ConvertToChtype in _cursesmodule.c. _ChType: TypeAlias = str | bytes | int # ACS codes are only initialized after initscr is called @@ -330,7 +331,7 @@ if sys.platform != "win32": def noraw() -> None: ... def pair_content(__pair_number: int) -> tuple[int, int]: ... def pair_number(__attr: int) -> int: ... - def putp(__string: bytes) -> None: ... + def putp(__string: ReadOnlyBuffer) -> None: ... def qiflush(__flag: bool = ...) -> None: ... def raw(__flag: bool = ...) -> None: ... def reset_prog_mode() -> None: ... @@ -352,7 +353,7 @@ if sys.platform != "win32": def tigetnum(__capname: str) -> int: ... def tigetstr(__capname: str) -> bytes | None: ... def tparm( - __str: bytes, + __str: ReadOnlyBuffer, __i1: int = ..., __i2: int = ..., __i3: int = ..., diff --git a/mypy/typeshed/stdlib/_decimal.pyi b/mypy/typeshed/stdlib/_decimal.pyi index 50c0f23734cd..ca97f69e2147 100644 --- a/mypy/typeshed/stdlib/_decimal.pyi +++ b/mypy/typeshed/stdlib/_decimal.pyi @@ -4,7 +4,7 @@ from _typeshed import Self from collections.abc import Container, Sequence from types import TracebackType from typing import Any, ClassVar, NamedTuple, Union, overload -from typing_extensions import TypeAlias +from typing_extensions import Literal, TypeAlias _Decimal: TypeAlias = Decimal | int _DecimalNew: TypeAlias = Union[Decimal, float, str, tuple[int, Sequence[int], int]] @@ -16,7 +16,7 @@ __libmpdec_version__: str class DecimalTuple(NamedTuple): sign: int digits: tuple[int, ...] 
- exponent: int + exponent: int | Literal["n", "N", "F"] ROUND_DOWN: str ROUND_HALF_UP: str diff --git a/mypy/typeshed/stdlib/_msi.pyi b/mypy/typeshed/stdlib/_msi.pyi index 9dda8a598549..1b86904d5ebc 100644 --- a/mypy/typeshed/stdlib/_msi.pyi +++ b/mypy/typeshed/stdlib/_msi.pyi @@ -12,11 +12,11 @@ if sys.platform == "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - # Actual typename Summary, not exposed by the implementation - class _Summary: - def GetProperty(self, propid: int) -> str | bytes | None: ... + # Actual typename SummaryInformation, not exposed by the implementation + class _SummaryInformation: + def GetProperty(self, field: int) -> int | bytes | None: ... def GetPropertyCount(self) -> int: ... - def SetProperty(self, propid: int, value: str | bytes) -> None: ... + def SetProperty(self, field: int, value: int | str) -> None: ... def Persist(self) -> None: ... # Don't exist at runtime __new__: None # type: ignore[assignment] @@ -25,7 +25,7 @@ if sys.platform == "win32": class _Database: def OpenView(self, sql: str) -> _View: ... def Commit(self) -> None: ... - def GetSummaryInformation(self, updateCount: int) -> _Summary: ... + def GetSummaryInformation(self, updateCount: int) -> _SummaryInformation: ... def Close(self) -> None: ... # Don't exist at runtime __new__: None # type: ignore[assignment] diff --git a/mypy/typeshed/stdlib/_operator.pyi b/mypy/typeshed/stdlib/_operator.pyi index 92e04d0f499d..7488724caf74 100644 --- a/mypy/typeshed/stdlib/_operator.pyi +++ b/mypy/typeshed/stdlib/_operator.pyi @@ -1,5 +1,6 @@ import sys -from collections.abc import Callable, Container, Iterable, Mapping, MutableMapping, MutableSequence, Sequence +from _typeshed import SupportsGetItem +from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, TypeVar, overload from typing_extensions import ParamSpec, SupportsIndex, TypeAlias, final @@ -77,11 +78,9 @@ def delitem(__a: MutableSequence[Any], __b: slice) -> None: ... @overload def delitem(__a: MutableMapping[_K, Any], __b: _K) -> None: ... @overload -def getitem(__a: Sequence[_T], __b: SupportsIndex) -> _T: ... -@overload def getitem(__a: Sequence[_T], __b: slice) -> Sequence[_T]: ... @overload -def getitem(__a: Mapping[_K, _V], __b: _K) -> _V: ... +def getitem(__a: SupportsGetItem[_K, _V], __b: _K) -> _V: ... def indexOf(__a: Iterable[_T], __b: _T) -> int: ... @overload def setitem(__a: MutableSequence[_T], __b: SupportsIndex, __c: _T) -> None: ... @@ -106,17 +105,30 @@ class attrgetter(Generic[_T_co]): @final class itemgetter(Generic[_T_co]): + # mypy lacks support for PEP 646 https://github.com/python/mypy/issues/12280 + # So we have to define all of these overloads to simulate unpacking the arguments @overload - def __new__(cls, item: Any) -> itemgetter[Any]: ... + def __new__(cls, item: _T_co) -> itemgetter[_T_co]: ... @overload - def __new__(cls, item: Any, __item2: Any) -> itemgetter[tuple[Any, Any]]: ... + def __new__(cls, item: _T_co, __item2: _T_co) -> itemgetter[tuple[_T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any) -> itemgetter[tuple[Any, Any, Any]]: ... + def __new__(cls, item: _T_co, __item2: _T_co, __item3: _T_co) -> itemgetter[tuple[_T_co, _T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any, __item4: Any) -> itemgetter[tuple[Any, Any, Any, Any]]: ... 
+ def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co + ) -> itemgetter[tuple[_T_co, _T_co, _T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, *items: Any) -> itemgetter[tuple[Any, ...]]: ... - def __call__(self, obj: Any) -> _T_co: ... + def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co, *items: _T_co + ) -> itemgetter[tuple[_T_co, ...]]: ... + # __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie: + # TypeVar "_KT_contra@SupportsGetItem" is contravariant + # "tuple[int, int]" is incompatible with protocol "SupportsIndex" + # preventing [_T_co, ...] instead of [Any, ...] + # + # A suspected mypy issue prevents using [..., _T] instead of [..., Any] here. + # https://github.com/python/mypy/issues/14032 + def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ... @final class methodcaller: diff --git a/mypy/typeshed/stdlib/_posixsubprocess.pyi b/mypy/typeshed/stdlib/_posixsubprocess.pyi index 2d221c4896f6..ca95336bb503 100644 --- a/mypy/typeshed/stdlib/_posixsubprocess.pyi +++ b/mypy/typeshed/stdlib/_posixsubprocess.pyi @@ -1,24 +1,32 @@ import sys +from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence +from typing_extensions import SupportsIndex if sys.platform != "win32": def cloexec_pipe() -> tuple[int, int]: ... def fork_exec( - args: Sequence[str], - executable_list: Sequence[bytes], - close_fds: bool, - fds_to_keep: Sequence[int], - cwd: str, - env_list: Sequence[bytes], - p2cread: int, - p2cwrite: int, - c2pred: int, - c2pwrite: int, - errread: int, - errwrite: int, - errpipe_read: int, - errpipe_write: int, - restore_signals: int, - start_new_session: int, - preexec_fn: Callable[[], None], + __process_args: Sequence[StrOrBytesPath] | None, + __executable_list: Sequence[bytes], + __close_fds: bool, + __fds_to_keep: tuple[int, ...], + __cwd_obj: str, + __env_list: Sequence[bytes] | None, + __p2cread: int, + __p2cwrite: int, + __c2pred: int, + __c2pwrite: int, + __errread: int, + __errwrite: int, + __errpipe_read: int, + __errpipe_write: int, + __restore_signals: int, + __call_setsid: int, + __pgid_to_set: int, + __gid_object: SupportsIndex | None, + __groups_list: list[int] | None, + __uid_object: SupportsIndex | None, + __child_umask: int, + __preexec_fn: Callable[[], None], + __allow_vfork: bool, ) -> int: ... diff --git a/mypy/typeshed/stdlib/_socket.pyi b/mypy/typeshed/stdlib/_socket.pyi index b2f77893d273..f7b0e6901bf4 100644 --- a/mypy/typeshed/stdlib/_socket.pyi +++ b/mypy/typeshed/stdlib/_socket.pyi @@ -15,10 +15,10 @@ _CMSG: TypeAlias = tuple[int, int, bytes] _CMSGArg: TypeAlias = tuple[int, int, ReadableBuffer] # Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, -# AF_NETLINK, AF_TIPC) or strings (AF_UNIX). -_Address: TypeAlias = tuple[Any, ...] | str +# AF_NETLINK, AF_TIPC) or strings/buffers (AF_UNIX). +# See getsockaddrarg() in socketmodule.c. +_Address: TypeAlias = tuple[Any, ...] | str | ReadableBuffer _RetAddress: TypeAlias = Any -# TODO Most methods allow bytes as address objects # ----- Constants ----- # Some socket families are listed in the "Socket families" section of the docs, @@ -583,11 +583,15 @@ class socket: def proto(self) -> int: ... @property def timeout(self) -> float | None: ... - def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... - def bind(self, __address: _Address | bytes) -> None: ... 
+ if sys.platform == "win32": + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | bytes | None = ...) -> None: ... + else: + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... + + def bind(self, __address: _Address) -> None: ... def close(self) -> None: ... - def connect(self, __address: _Address | bytes) -> None: ... - def connect_ex(self, __address: _Address | bytes) -> int: ... + def connect(self, __address: _Address) -> None: ... + def connect_ex(self, __address: _Address) -> int: ... def detach(self) -> int: ... def fileno(self) -> int: ... def getpeername(self) -> _RetAddress: ... @@ -634,7 +638,7 @@ class socket: def setblocking(self, __flag: bool) -> None: ... def settimeout(self, __value: float | None) -> None: ... @overload - def setsockopt(self, __level: int, __optname: int, __value: int | bytes) -> None: ... + def setsockopt(self, __level: int, __optname: int, __value: int | ReadableBuffer) -> None: ... @overload def setsockopt(self, __level: int, __optname: int, __value: None, __optlen: int) -> None: ... if sys.platform == "win32": @@ -671,9 +675,9 @@ def ntohs(__x: int) -> int: ... # param & ret val are 16-bit ints def htonl(__x: int) -> int: ... # param & ret val are 32-bit ints def htons(__x: int) -> int: ... # param & ret val are 16-bit ints def inet_aton(__ip_string: str) -> bytes: ... # ret val 4 bytes in length -def inet_ntoa(__packed_ip: bytes) -> str: ... +def inet_ntoa(__packed_ip: ReadableBuffer) -> str: ... def inet_pton(__address_family: int, __ip_string: str) -> bytes: ... -def inet_ntop(__address_family: int, __packed_ip: bytes) -> str: ... +def inet_ntop(__address_family: int, __packed_ip: ReadableBuffer) -> str: ... def getdefaulttimeout() -> float | None: ... def setdefaulttimeout(__timeout: float | None) -> None: ... diff --git a/mypy/typeshed/stdlib/_tkinter.pyi b/mypy/typeshed/stdlib/_tkinter.pyi index c2cf55505afb..fced8c95d2fa 100644 --- a/mypy/typeshed/stdlib/_tkinter.pyi +++ b/mypy/typeshed/stdlib/_tkinter.pyi @@ -17,8 +17,10 @@ from typing_extensions import Literal, final # (, ) @final class Tcl_Obj: - string: str | bytes - typename: str + @property + def string(self) -> str: ... + @property + def typename(self) -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, __other): ... def __ge__(self, __other): ... diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index b0ee1f4ad48a..1b54284fe727 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -119,7 +119,7 @@ class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): # stable class SupportsGetItem(Protocol[_KT_contra, _VT_co]): - def __contains__(self, __x: object) -> bool: ... + def __contains__(self, __x: Any) -> bool: ... def __getitem__(self, __key: _KT_contra) -> _VT_co: ... # stable @@ -234,6 +234,7 @@ else: WriteableBuffer: TypeAlias = bytearray | memoryview | array.array[Any] | mmap.mmap | ctypes._CData # stable # Same as _WriteableBuffer, but also includes read-only buffer types (like bytes). 
ReadableBuffer: TypeAlias = ReadOnlyBuffer | WriteableBuffer # stable +_BufferWithLen: TypeAlias = ReadableBuffer # not stable # noqa: Y047 ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] OptExcInfo: TypeAlias = Union[ExcInfo, tuple[None, None, None]] @@ -275,5 +276,4 @@ StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] # Objects suitable to be passed to sys.settrace, threading.settrace, and similar -# TODO: Ideally this would be a recursive type alias -TraceFunction: TypeAlias = Callable[[FrameType, str, Any], Callable[[FrameType, str, Any], Any] | None] +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index ddea3d67ed14..3ccac7e6b7e6 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import ReadableBuffer from collections.abc import Sequence from typing import Any, NoReturn, overload from typing_extensions import Literal, final @@ -198,11 +199,11 @@ if sys.platform == "win32": def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... def WaitNamedPipe(__name: str, __timeout: int) -> None: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: Literal[False] = ...) -> tuple[int, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = ...) -> tuple[int, int]: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: int | bool) -> tuple[Any, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... @final class Overlapped: event: int diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index f7f82333a362..110eba24a9ca 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -16,10 +16,7 @@ class ABCMeta(type): __mcls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwargs: Any ) -> Self: ... else: - # pyright doesn't like the first parameter being called mcls, hence the `pyright: ignore` - def __new__( - mcls: type[Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any # pyright: ignore - ) -> Self: ... + def __new__(mcls: type[Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any) -> Self: ... def __instancecheck__(cls: ABCMeta, instance: Any) -> Any: ... def __subclasscheck__(cls: ABCMeta, subclass: Any) -> Any: ... diff --git a/mypy/typeshed/stdlib/antigravity.pyi b/mypy/typeshed/stdlib/antigravity.pyi index e30917511030..3986e7d1c9f2 100644 --- a/mypy/typeshed/stdlib/antigravity.pyi +++ b/mypy/typeshed/stdlib/antigravity.pyi @@ -1 +1,3 @@ -def geohash(latitude: float, longitude: float, datedow: bytes) -> None: ... +from _typeshed import ReadableBuffer + +def geohash(latitude: float, longitude: float, datedow: ReadableBuffer) -> None: ... 
diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi index 2d27cd72e8df..e84456049df6 100644 --- a/mypy/typeshed/stdlib/array.pyi +++ b/mypy/typeshed/stdlib/array.pyi @@ -21,15 +21,19 @@ class array(MutableSequence[_T], Generic[_T]): @property def itemsize(self) -> int: ... @overload - def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | Iterable[int] = ...) -> None: ... + def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | bytearray | Iterable[int] = ...) -> None: ... @overload - def __init__(self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | Iterable[float] = ...) -> None: ... + def __init__( + self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | bytearray | Iterable[float] = ... + ) -> None: ... @overload - def __init__(self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | Iterable[str] = ...) -> None: ... + def __init__( + self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | bytearray | Iterable[str] = ... + ) -> None: ... @overload def __init__(self, __typecode: str, __initializer: Iterable[_T]) -> None: ... @overload - def __init__(self, __typecode: str, __initializer: bytes = ...) -> None: ... + def __init__(self, __typecode: str, __initializer: bytes | bytearray = ...) -> None: ... def append(self, __v: _T) -> None: ... def buffer_info(self) -> tuple[int, int]: ... def byteswap(self) -> None: ... @@ -52,7 +56,7 @@ class array(MutableSequence[_T], Generic[_T]): def tolist(self) -> list[_T]: ... def tounicode(self) -> str: ... if sys.version_info < (3, 9): - def fromstring(self, __buffer: bytes) -> None: ... + def fromstring(self, __buffer: str | ReadableBuffer) -> None: ... def tostring(self) -> bytes: ... def __len__(self) -> int: ... diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index 6c9dbd0162b8..b2cff5b00264 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -1,5 +1,7 @@ +import os import sys from _ast import * +from _typeshed import ReadableBuffer from collections.abc import Iterator from typing import Any, TypeVar, overload from typing_extensions import Literal @@ -10,7 +12,7 @@ if sys.version_info >= (3, 8): def __init__(cls, *args: object) -> None: ... class Num(Constant, metaclass=_ABC): - value: complex + value: int | float | complex class Str(Constant, metaclass=_ABC): value: str @@ -157,8 +159,8 @@ _T = TypeVar("_T", bound=AST) if sys.version_info >= (3, 8): @overload def parse( - source: str | bytes, - filename: str | bytes = ..., + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: Literal["exec"] = ..., *, type_comments: bool = ..., @@ -166,8 +168,8 @@ if sys.version_info >= (3, 8): ) -> Module: ... @overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"], *, type_comments: bool = ..., @@ -175,8 +177,8 @@ if sys.version_info >= (3, 8): ) -> Expression: ... @overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["func_type"], *, type_comments: bool = ..., @@ -184,8 +186,8 @@ if sys.version_info >= (3, 8): ) -> FunctionType: ... 
@overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"], *, type_comments: bool = ..., @@ -193,7 +195,7 @@ if sys.version_info >= (3, 8): ) -> Interactive: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["eval"], type_comments: bool = ..., @@ -201,7 +203,7 @@ if sys.version_info >= (3, 8): ) -> Expression: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["func_type"], type_comments: bool = ..., @@ -209,7 +211,7 @@ if sys.version_info >= (3, 8): ) -> FunctionType: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["single"], type_comments: bool = ..., @@ -217,8 +219,8 @@ if sys.version_info >= (3, 8): ) -> Interactive: ... @overload def parse( - source: str | bytes, - filename: str | bytes = ..., + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: str = ..., *, type_comments: bool = ..., @@ -227,17 +229,23 @@ if sys.version_info >= (3, 8): else: @overload - def parse(source: str | bytes, filename: str | bytes = ..., mode: Literal["exec"] = ...) -> Module: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: Literal["exec"] = ... + ) -> Module: ... @overload - def parse(source: str | bytes, filename: str | bytes, mode: Literal["eval"]) -> Expression: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"] + ) -> Expression: ... @overload - def parse(source: str | bytes, filename: str | bytes, mode: Literal["single"]) -> Interactive: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"] + ) -> Interactive: ... @overload - def parse(source: str | bytes, *, mode: Literal["eval"]) -> Expression: ... + def parse(source: str | ReadableBuffer, *, mode: Literal["eval"]) -> Expression: ... @overload - def parse(source: str | bytes, *, mode: Literal["single"]) -> Interactive: ... + def parse(source: str | ReadableBuffer, *, mode: Literal["single"]) -> Interactive: ... @overload - def parse(source: str | bytes, filename: str | bytes = ..., mode: str = ...) -> AST: ... + def parse(source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: str = ...) -> AST: ... if sys.version_info >= (3, 9): def unparse(ast_obj: AST) -> str: ... diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index 8697bfe306c4..c1ab114b6036 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import FileDescriptorLike, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory from asyncio.futures import Future from asyncio.protocols import BaseProtocol @@ -102,7 +102,7 @@ class BaseEventLoop(AbstractEventLoop): async def getaddrinfo( self, host: bytes | str | None, - port: str | int | None, + port: bytes | str | int | None, *, family: int = ..., type: int = ..., @@ -411,13 +411,13 @@ class BaseEventLoop(AbstractEventLoop): # BaseEventLoop, only on subclasses. We list them here for now for convenience. 
async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... - async def sock_sendall(self, sock: socket, data: bytes) -> None: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... async def sock_connect(self, sock: socket, address: _Address) -> None: ... async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... if sys.version_info >= (3, 11): async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... - async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index 586116136c1a..280be4ab5ba9 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import FileDescriptorLike, Self, StrPath, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, Self, StrPath, WriteableBuffer from abc import ABCMeta, abstractmethod from collections.abc import Awaitable, Callable, Coroutine, Generator, Sequence from contextvars import Context @@ -194,7 +194,7 @@ class AbstractEventLoop: async def getaddrinfo( self, host: bytes | str | None, - port: str | int | None, + port: bytes | str | int | None, *, family: int = ..., type: int = ..., @@ -562,7 +562,7 @@ class AbstractEventLoop: @abstractmethod async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... @abstractmethod - async def sock_sendall(self, sock: socket, data: bytes) -> None: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... @abstractmethod async def sock_connect(self, sock: socket, address: _Address) -> None: ... @abstractmethod @@ -573,7 +573,7 @@ class AbstractEventLoop: @abstractmethod async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... @abstractmethod - async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. @abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., object], *args: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/sslproto.pyi b/mypy/typeshed/stdlib/asyncio/sslproto.pyi index 3bb4db69c123..3c1c7b2e4edb 100644 --- a/mypy/typeshed/stdlib/asyncio/sslproto.pyi +++ b/mypy/typeshed/stdlib/asyncio/sslproto.pyi @@ -74,7 +74,7 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): def get_extra_info(self, name: str, default: Any | None = ...) -> dict[str, Any]: ... @property def _protocol_paused(self) -> bool: ... - def write(self, data: bytes) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... def can_write_eof(self) -> Literal[False]: ... if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi index 139d86b292c3..00d95d93f2ff 100644 --- a/mypy/typeshed/stdlib/asyncio/streams.pyi +++ b/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -3,7 +3,7 @@ import sys from _typeshed import Self, StrPath from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence from typing import Any -from typing_extensions import TypeAlias +from typing_extensions import SupportsIndex, TypeAlias from . import events, protocols, transports from .base_events import Server @@ -139,8 +139,8 @@ class StreamWriter: ) -> None: ... @property def transport(self) -> transports.WriteTransport: ... - def write(self, data: bytes) -> None: ... - def writelines(self, data: Iterable[bytes]) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def close(self) -> None: ... @@ -160,9 +160,10 @@ class StreamReader(AsyncIterator[bytes]): def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... def at_eof(self) -> bool: ... - def feed_data(self, data: bytes) -> None: ... + def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... async def readline(self) -> bytes: ... - async def readuntil(self, separator: bytes = ...) -> bytes: ... + # Can be any buffer that supports len(); consider changing to a Protocol if PEP 688 is accepted + async def readuntil(self, separator: bytes | bytearray | memoryview = ...) -> bytes: ... async def read(self, n: int = ...) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... def __aiter__(self: Self) -> Self: ... diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi index 32fcf1a65491..7fb588396905 100644 --- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi @@ -38,7 +38,7 @@ class Process: def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... - async def communicate(self, input: bytes | None = ...) -> tuple[bytes, bytes]: ... + async def communicate(self, input: bytes | bytearray | memoryview | None = ...) -> tuple[bytes, bytes]: ... if sys.version_info >= (3, 10): async def create_subprocess_shell( diff --git a/mypy/typeshed/stdlib/asyncio/transports.pyi b/mypy/typeshed/stdlib/asyncio/transports.pyi index 3eb3d1ae3173..893292dd12b6 100644 --- a/mypy/typeshed/stdlib/asyncio/transports.pyi +++ b/mypy/typeshed/stdlib/asyncio/transports.pyi @@ -23,8 +23,8 @@ class WriteTransport(BaseTransport): def set_write_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... def get_write_buffer_size(self) -> int: ... def get_write_buffer_limits(self) -> tuple[int, int]: ... - def write(self, data: bytes) -> None: ... - def writelines(self, list_of_data: Iterable[bytes]) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def abort(self) -> None: ... @@ -32,7 +32,7 @@ class WriteTransport(BaseTransport): class Transport(ReadTransport, WriteTransport): ... class DatagramTransport(BaseTransport): - def sendto(self, data: bytes, addr: _Address | None = ...) -> None: ... 
+ def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = ...) -> None: ... def abort(self) -> None: ... class SubprocessTransport(BaseTransport): diff --git a/mypy/typeshed/stdlib/asyncio/trsock.pyi b/mypy/typeshed/stdlib/asyncio/trsock.pyi index b8972e43d255..742216a84ccd 100644 --- a/mypy/typeshed/stdlib/asyncio/trsock.pyi +++ b/mypy/typeshed/stdlib/asyncio/trsock.pyi @@ -1,5 +1,6 @@ import socket import sys +from _typeshed import ReadableBuffer from builtins import type as Type # alias to avoid name clashes with property named "type" from collections.abc import Iterable from types import TracebackType @@ -7,7 +8,7 @@ from typing import Any, BinaryIO, NoReturn, overload from typing_extensions import TypeAlias # These are based in socket, maybe move them out into _typeshed.pyi or such -_Address: TypeAlias = tuple[Any, ...] | str +_Address: TypeAlias = socket._Address _RetAddress: TypeAlias = Any _WriteBuffer: TypeAlias = bytearray | memoryview _CMSG: TypeAlias = tuple[int, int, bytes] @@ -30,7 +31,7 @@ class TransportSocket: @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... @overload - def setsockopt(self, level: int, optname: int, value: int | bytes) -> None: ... + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer) -> None: ... @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... def getpeername(self) -> _RetAddress: ... @@ -42,9 +43,9 @@ class TransportSocket: if sys.version_info < (3, 11): def _na(self, what: str) -> None: ... def accept(self) -> tuple[socket.socket, _RetAddress]: ... - def connect(self, address: _Address | bytes) -> None: ... - def connect_ex(self, address: _Address | bytes) -> int: ... - def bind(self, address: _Address | bytes) -> None: ... + def connect(self, address: _Address) -> None: ... + def connect_ex(self, address: _Address) -> int: ... + def bind(self, address: _Address) -> None: ... if sys.platform == "win32": def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ... else: @@ -57,22 +58,26 @@ class TransportSocket: def detach(self) -> int: ... if sys.platform == "linux": def sendmsg_afalg( - self, msg: Iterable[bytes] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> int: ... else: def sendmsg_afalg( - self, msg: Iterable[bytes] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> NoReturn: ... def sendmsg( - self, __buffers: Iterable[bytes], __ancdata: Iterable[_CMSG] = ..., __flags: int = ..., __address: _Address = ... + self, + __buffers: Iterable[ReadableBuffer], + __ancdata: Iterable[_CMSG] = ..., + __flags: int = ..., + __address: _Address = ..., ) -> int: ... @overload - def sendto(self, data: bytes, address: _Address) -> int: ... + def sendto(self, data: ReadableBuffer, address: _Address) -> int: ... @overload - def sendto(self, data: bytes, flags: int, address: _Address) -> int: ... - def send(self, data: bytes, flags: int = ...) -> int: ... - def sendall(self, data: bytes, flags: int = ...) -> None: ... + def sendto(self, data: ReadableBuffer, flags: int, address: _Address) -> int: ... + def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... 
def set_inheritable(self, inheritable: bool) -> None: ... if sys.platform == "win32": def share(self, process_id: int) -> bytes: ... diff --git a/mypy/typeshed/stdlib/asyncio/windows_events.pyi b/mypy/typeshed/stdlib/asyncio/windows_events.pyi index ffb487fff03a..dca06ea33b13 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_events.pyi @@ -1,6 +1,6 @@ import socket import sys -from _typeshed import WriteableBuffer +from _typeshed import Incomplete, WriteableBuffer from collections.abc import Callable from typing import IO, Any, ClassVar, NoReturn from typing_extensions import Literal @@ -50,10 +50,14 @@ if sys.platform == "win32": def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... def accept(self, listener: socket.socket) -> futures.Future[Any]: ... - def connect(self, conn: socket.socket, address: bytes) -> futures.Future[Any]: ... + def connect( + self, + conn: socket.socket, + address: tuple[Incomplete, Incomplete] | tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ) -> futures.Future[Any]: ... def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... - async def connect_pipe(self, address: bytes) -> windows_utils.PipeHandle: ... + async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = ...) -> bool: ... def close(self) -> None: ... SelectorEventLoop = _WindowsSelectorEventLoop diff --git a/mypy/typeshed/stdlib/asyncore.pyi b/mypy/typeshed/stdlib/asyncore.pyi index 0025ec3f9b4e..565deb4d1cad 100644 --- a/mypy/typeshed/stdlib/asyncore.pyi +++ b/mypy/typeshed/stdlib/asyncore.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import FileDescriptorLike +from _typeshed import FileDescriptorLike, ReadableBuffer from socket import socket from typing import Any, overload from typing_extensions import TypeAlias @@ -45,7 +45,7 @@ class dispatcher: def bind(self, addr: tuple[Any, ...] | str) -> None: ... def connect(self, address: tuple[Any, ...] | str) -> None: ... def accept(self) -> tuple[_Socket, Any] | None: ... - def send(self, data: bytes) -> int: ... + def send(self, data: ReadableBuffer) -> int: ... def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... def log(self, message: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/base64.pyi b/mypy/typeshed/stdlib/base64.pyi index c2ec85cac40a..816622eeb071 100644 --- a/mypy/typeshed/stdlib/base64.pyi +++ b/mypy/typeshed/stdlib/base64.pyi @@ -42,7 +42,9 @@ if sys.version_info >= (3, 10): def b32hexdecode(s: str | ReadableBuffer, casefold: bool = ...) -> bytes: ... def a85encode(b: ReadableBuffer, *, foldspaces: bool = ..., wrapcol: int = ..., pad: bool = ..., adobe: bool = ...) -> bytes: ... -def a85decode(b: str | ReadableBuffer, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: str | bytes = ...) -> bytes: ... +def a85decode( + b: str | ReadableBuffer, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: bytearray | bytes = ... +) -> bytes: ... def b85encode(b: ReadableBuffer, pad: bool = ...) -> bytes: ... def b85decode(b: str | ReadableBuffer) -> bytes: ... def decode(input: IO[bytes], output: IO[bytes]) -> None: ... 
diff --git a/mypy/typeshed/stdlib/binhex.pyi b/mypy/typeshed/stdlib/binhex.pyi index 639d30d1d0de..e0993c840ce7 100644 --- a/mypy/typeshed/stdlib/binhex.pyi +++ b/mypy/typeshed/stdlib/binhex.pyi @@ -1,3 +1,4 @@ +from _typeshed import _BufferWithLen from typing import IO, Any from typing_extensions import Literal, TypeAlias @@ -27,9 +28,9 @@ class openrsrc: class BinHex: def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ... - def write(self, data: bytes) -> None: ... + def write(self, data: _BufferWithLen) -> None: ... def close_data(self) -> None: ... - def write_rsrc(self, data: bytes) -> None: ... + def write_rsrc(self, data: _BufferWithLen) -> None: ... def close(self) -> None: ... def binhex(inp: str, out: str) -> None: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index d3b3f677b370..00eac9e49cf0 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -50,7 +50,6 @@ from typing import ( # noqa: Y027 SupportsComplex, SupportsFloat, SupportsInt, - SupportsRound, TypeVar, overload, type_check_only, @@ -299,7 +298,7 @@ class int: def __index__(self) -> int: ... class float: - def __new__(cls: type[Self], x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... + def __new__(cls: type[Self], __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... def as_integer_ratio(self) -> tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @@ -495,15 +494,11 @@ class str(Sequence[str]): class bytes(ByteString): @overload - def __new__(cls: type[Self], __ints: Iterable[SupportsIndex]) -> Self: ... + def __new__(cls: type[Self], __o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> Self: ... @overload def __new__(cls: type[Self], __string: str, encoding: str, errors: str = ...) -> Self: ... @overload - def __new__(cls: type[Self], __length: SupportsIndex) -> Self: ... - @overload def __new__(cls: type[Self]) -> Self: ... - @overload - def __new__(cls: type[Self], __o: SupportsBytes) -> Self: ... def capitalize(self) -> bytes: ... def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... def count( @@ -589,7 +584,7 @@ class bytes(ByteString): def __rmul__(self, __n: SupportsIndex) -> bytes: ... def __mod__(self, __value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ - def __contains__(self, __o: SupportsIndex | bytes) -> bool: ... # type: ignore[override] + def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... def __lt__(self, __x: bytes) -> bool: ... @@ -604,11 +599,9 @@ class bytearray(MutableSequence[int], ByteString): @overload def __init__(self) -> None: ... @overload - def __init__(self, __ints: Iterable[SupportsIndex]) -> None: ... + def __init__(self, __ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer) -> None: ... @overload def __init__(self, __string: str, encoding: str, errors: str = ...) -> None: ... - @overload - def __init__(self, __length: SupportsIndex) -> None: ... def append(self, __item: SupportsIndex) -> None: ... def capitalize(self) -> bytearray: ... def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... @@ -712,14 +705,14 @@ class bytearray(MutableSequence[int], ByteString): def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... 
# type: ignore[override] def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... - def __lt__(self, __x: bytes) -> bool: ... - def __le__(self, __x: bytes) -> bool: ... - def __gt__(self, __x: bytes) -> bool: ... - def __ge__(self, __x: bytes) -> bool: ... + def __lt__(self, __x: ReadableBuffer) -> bool: ... + def __le__(self, __x: ReadableBuffer) -> bool: ... + def __gt__(self, __x: ReadableBuffer) -> bool: ... + def __ge__(self, __x: ReadableBuffer) -> bool: ... def __alloc__(self) -> int: ... @final -class memoryview(Sized, Sequence[int]): +class memoryview(Sequence[int]): @property def format(self) -> str: ... @property @@ -735,7 +728,7 @@ class memoryview(Sized, Sequence[int]): @property def ndim(self) -> int: ... @property - def obj(self) -> bytes | bytearray: ... + def obj(self) -> ReadableBuffer: ... @property def c_contiguous(self) -> bool: ... @property @@ -1239,19 +1232,13 @@ def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: @overload def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... -# We need recursive types to express the type of the second argument to `isinstance` properly, hence the use of `Any` if sys.version_info >= (3, 10): - def isinstance( - __obj: object, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - def issubclass( - __cls: type, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - + _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] else: - def isinstance(__obj: object, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... - def issubclass(__cls: type, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] +def isinstance(__obj: object, __class_or_tuple: _ClassInfo) -> bool: ... +def issubclass(__cls: type, __class_or_tuple: _ClassInfo) -> bool: ... def len(__obj: Sized) -> int: ... def license() -> None: ... def locals() -> dict[str, Any]: ... @@ -1539,12 +1526,21 @@ class reversed(Iterator[_T], Generic[_T]): def __length_hint__(self) -> int: ... def repr(__obj: object) -> str: ... + +# See https://github.com/python/typeshed/pull/9141 +# and https://github.com/python/typeshed/pull/9151 +# on why we don't use `SupportsRound` from `typing.pyi` + +class _SupportsRound1(Protocol[_T_co]): + def __round__(self) -> _T_co: ... + +class _SupportsRound2(Protocol[_T_co]): + def __round__(self, __ndigits: int) -> _T_co: ... + @overload -def round(number: SupportsRound[Any]) -> int: ... -@overload -def round(number: SupportsRound[Any], ndigits: None) -> int: ... +def round(number: _SupportsRound1[_T], ndigits: None = ...) -> _T: ... @overload -def round(number: SupportsRound[_T], ndigits: SupportsIndex) -> _T: ... +def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... # See https://github.com/python/typeshed/pull/6292#discussion_r748875189 # for why arg 3 of `setattr` should be annotated with `Any` and not `object` @@ -1586,8 +1582,12 @@ else: @overload def sum(__iterable: Iterable[_AddableT1], __start: _AddableT2) -> _AddableT1 | _AddableT2: ... 
-# The argument to `vars()` has to have a `__dict__` attribute, so can't be annotated with `object` +# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` # (A "SupportsDunderDict" protocol doesn't work) +# Use a type: ignore to make complaints about overlapping overloads go away +@overload +def vars(__object: type) -> types.MappingProxyType[str, Any]: ... # type: ignore[misc] +@overload def vars(__object: Any = ...) -> dict[str, Any]: ... class zip(Iterator[_T_co], Generic[_T_co]): diff --git a/mypy/typeshed/stdlib/bz2.pyi b/mypy/typeshed/stdlib/bz2.pyi index cea317e28037..295271d4a80b 100644 --- a/mypy/typeshed/stdlib/bz2.pyi +++ b/mypy/typeshed/stdlib/bz2.pyi @@ -19,8 +19,8 @@ class _WritableFileobj(Protocol): # def fileno(self) -> int: ... # def close(self) -> object: ... -def compress(data: bytes, compresslevel: int = ...) -> bytes: ... -def decompress(data: bytes) -> bytes: ... +def compress(data: ReadableBuffer, compresslevel: int = ...) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... _ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] _WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] @@ -132,12 +132,12 @@ class BZ2File(BaseStream, IO[bytes]): @final class BZ2Compressor: def __init__(self, compresslevel: int = ...) -> None: ... - def compress(self, __data: bytes) -> bytes: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... def flush(self) -> bytes: ... @final class BZ2Decompressor: - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... @property def eof(self) -> bool: ... @property diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index a7b60e38df11..cd6ac0006c53 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -1,12 +1,11 @@ import types -from _typeshed import Self +from _codecs import * +from _typeshed import ReadableBuffer, Self from abc import abstractmethod from collections.abc import Callable, Generator, Iterable from typing import Any, BinaryIO, Protocol, TextIO from typing_extensions import Literal -from _codecs import * - __all__ = [ "register", "lookup", @@ -173,7 +172,7 @@ class IncrementalDecoder: errors: str def __init__(self, errors: str = ...) -> None: ... @abstractmethod - def decode(self, input: bytes, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... def reset(self) -> None: ... def getstate(self) -> tuple[bytes, int]: ... def setstate(self, state: tuple[bytes, int]) -> None: ... @@ -190,8 +189,8 @@ class BufferedIncrementalDecoder(IncrementalDecoder): buffer: bytes def __init__(self, errors: str = ...) -> None: ... @abstractmethod - def _buffer_decode(self, input: bytes, errors: str, final: bool) -> tuple[str, int]: ... - def decode(self, input: bytes, final: bool = ...) -> str: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... # TODO: it is not possible to specify the requirement that all other # attributes and methods are passed-through from the stream. 
diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 78f4ee4d5ab3..1851d3481ee2 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -194,7 +194,7 @@ class _SimpleCData(Generic[_T], _CData): class c_byte(_SimpleCData[int]): ... class c_char(_SimpleCData[bytes]): - def __init__(self, value: int | bytes = ...) -> None: ... + def __init__(self, value: int | bytes | bytearray = ...) -> None: ... class c_char_p(_PointerLike, _SimpleCData[bytes | None]): def __init__(self, value: int | bytes | None = ...) -> None: ... @@ -266,7 +266,11 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - raw: bytes # Note: only available if _CT == c_char + # Note: only available if _CT == c_char + @property + def raw(self) -> bytes: ... + @raw.setter + def raw(self, value: ReadableBuffer) -> None: ... value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index 5926ff0a808e..b1b3c17ee25b 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -1,7 +1,8 @@ import sys from _typeshed import Self +from abc import abstractmethod from time import struct_time -from typing import ClassVar, NamedTuple, NoReturn, SupportsAbs, TypeVar, overload +from typing import ClassVar, NamedTuple, NoReturn, TypeVar, overload from typing_extensions import Literal, TypeAlias, final if sys.version_info >= (3, 11): @@ -15,8 +16,11 @@ MINYEAR: Literal[1] MAXYEAR: Literal[9999] class tzinfo: + @abstractmethod def tzname(self, __dt: datetime | None) -> str | None: ... + @abstractmethod def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + @abstractmethod def dst(self, __dt: datetime | None) -> timedelta | None: ... def fromutc(self, __dt: datetime) -> datetime: ... @@ -29,6 +33,9 @@ class timezone(tzinfo): min: ClassVar[timezone] max: ClassVar[timezone] def __init__(self, offset: timedelta, name: str = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str: ... + def utcoffset(self, __dt: datetime | None) -> timedelta: ... + def dst(self, __dt: datetime | None) -> None: ... if sys.version_info >= (3, 11): UTC: timezone @@ -152,7 +159,7 @@ class time: _Date: TypeAlias = date _Time: TypeAlias = time -class timedelta(SupportsAbs[timedelta]): +class timedelta: min: ClassVar[timedelta] max: ClassVar[timedelta] resolution: ClassVar[timedelta] diff --git a/mypy/typeshed/stdlib/dbm/__init__.pyi b/mypy/typeshed/stdlib/dbm/__init__.pyi index 9e99f0d5e74c..33b8aab96610 100644 --- a/mypy/typeshed/stdlib/dbm/__init__.pyi +++ b/mypy/typeshed/stdlib/dbm/__init__.pyi @@ -6,7 +6,7 @@ from typing_extensions import Literal, TypeAlias __all__ = ["open", "whichdb", "error"] _KeyType: TypeAlias = str | bytes -_ValueType: TypeAlias = str | bytes +_ValueType: TypeAlias = str | bytes | bytearray _TFlags: TypeAlias = Literal[ "r", "w", diff --git a/mypy/typeshed/stdlib/dbm/dumb.pyi b/mypy/typeshed/stdlib/dbm/dumb.pyi index 4fd199f19728..738e68968ca8 100644 --- a/mypy/typeshed/stdlib/dbm/dumb.pyi +++ b/mypy/typeshed/stdlib/dbm/dumb.pyi @@ -10,6 +10,9 @@ _ValueType: TypeAlias = str | bytes error = OSError +# This class doesn't exist at runtime. 
open() can return an instance of +# any of the three implementations of dbm (dumb, gnu, ndbm), and this +# class is intended to represent the common interface supported by all three. class _Database(MutableMapping[_KeyType, bytes]): def __init__(self, filebasename: str, mode: str, flag: str = ...) -> None: ... def sync(self) -> None: ... diff --git a/mypy/typeshed/stdlib/dbm/gnu.pyi b/mypy/typeshed/stdlib/dbm/gnu.pyi index 561206c4e0be..93b9df1077ce 100644 --- a/mypy/typeshed/stdlib/dbm/gnu.pyi +++ b/mypy/typeshed/stdlib/dbm/gnu.pyi @@ -1,13 +1,13 @@ import sys -from _typeshed import Self +from _typeshed import ReadOnlyBuffer, Self from types import TracebackType from typing import TypeVar, overload from typing_extensions import TypeAlias if sys.platform != "win32": _T = TypeVar("_T") - _KeyType: TypeAlias = str | bytes - _ValueType: TypeAlias = str | bytes + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer open_flags: str @@ -31,7 +31,7 @@ if sys.platform != "win32": @overload def get(self, k: _KeyType) -> bytes | None: ... @overload - def get(self, k: _KeyType, default: bytes | _T) -> bytes | _T: ... + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... def keys(self) -> list[bytes]: ... def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... # Don't exist at runtime diff --git a/mypy/typeshed/stdlib/dbm/ndbm.pyi b/mypy/typeshed/stdlib/dbm/ndbm.pyi index f1032bf3cae7..ca658098bd5c 100644 --- a/mypy/typeshed/stdlib/dbm/ndbm.pyi +++ b/mypy/typeshed/stdlib/dbm/ndbm.pyi @@ -1,13 +1,13 @@ import sys -from _typeshed import Self +from _typeshed import ReadOnlyBuffer, Self from types import TracebackType from typing import TypeVar, overload from typing_extensions import TypeAlias if sys.platform != "win32": _T = TypeVar("_T") - _KeyType: TypeAlias = str | bytes - _ValueType: TypeAlias = str | bytes + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer class error(OSError): ... library: str @@ -27,7 +27,7 @@ if sys.platform != "win32": @overload def get(self, k: _KeyType) -> bytes | None: ... @overload - def get(self, k: _KeyType, default: bytes | _T) -> bytes | _T: ... + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... def keys(self) -> list[bytes]: ... def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... # Don't exist at runtime diff --git a/mypy/typeshed/stdlib/difflib.pyi b/mypy/typeshed/stdlib/difflib.pyi index 854a53d433ae..df2f8be0168a 100644 --- a/mypy/typeshed/stdlib/difflib.pyi +++ b/mypy/typeshed/stdlib/difflib.pyi @@ -127,12 +127,12 @@ class HtmlDiff: def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], - a: Sequence[bytes], - b: Sequence[bytes], - fromfile: bytes = ..., - tofile: bytes = ..., - fromfiledate: bytes = ..., - tofiledate: bytes = ..., + a: Iterable[bytes | bytearray], + b: Iterable[bytes | bytearray], + fromfile: bytes | bytearray = ..., + tofile: bytes | bytearray = ..., + fromfiledate: bytes | bytearray = ..., + tofiledate: bytes | bytearray = ..., n: int = ..., - lineterm: bytes = ..., + lineterm: bytes | bytearray = ..., ) -> Iterator[bytes]: ... 
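A small usage sketch for the widened `difflib.diff_bytes` parameters above; the line contents are arbitrary illustrative data, and bytearray lines work at runtime because each element only needs a `decode` method.

import difflib

old = [b"one\n", b"two\n"]
new = [bytearray(b"one\n"), bytearray(b"three\n")]    # bytearray lines are fine at runtime
for line in difflib.diff_bytes(difflib.unified_diff, old, new, fromfile=b"old", tofile=b"new"):
    print(line.decode())                              # diff_bytes yields bytes lines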
diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index dd31d981071f..73adba5c19f5 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -37,7 +37,6 @@ __all__ = [ # Strictly this should not have to include Callable, but mypy doesn't use FunctionType # for functions (python/mypy#3171) _HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeType | type | Callable[..., Any] -_HaveCodeOrStringType: TypeAlias = _HaveCodeType | str | bytes if sys.version_info >= (3, 11): class Positions(NamedTuple): @@ -75,7 +74,7 @@ class Bytecode: if sys.version_info >= (3, 11): def __init__( self, - x: _HaveCodeOrStringType, + x: _HaveCodeType | str, *, first_line: int | None = ..., current_offset: int | None = ..., @@ -87,9 +86,7 @@ class Bytecode: cls: type[Self], tb: types.TracebackType, *, show_caches: bool = ..., adaptive: bool = ... ) -> Self: ... else: - def __init__( - self, x: _HaveCodeOrStringType, *, first_line: int | None = ..., current_offset: int | None = ... - ) -> None: ... + def __init__(self, x: _HaveCodeType | str, *, first_line: int | None = ..., current_offset: int | None = ...) -> None: ... @classmethod def from_traceback(cls: type[Self], tb: types.TracebackType) -> Self: ... @@ -102,11 +99,11 @@ COMPILER_FLAG_NAMES: dict[int, str] def findlabels(code: _HaveCodeType) -> list[int]: ... def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... def pretty_flags(flags: int) -> str: ... -def code_info(x: _HaveCodeOrStringType) -> str: ... +def code_info(x: _HaveCodeType | str) -> str: ... if sys.version_info >= (3, 11): def dis( - x: _HaveCodeOrStringType | None = ..., + x: _HaveCodeType | str | bytes | bytearray | None = ..., *, file: IO[str] | None = ..., depth: int | None = ..., @@ -115,7 +112,9 @@ if sys.version_info >= (3, 11): ) -> None: ... else: - def dis(x: _HaveCodeOrStringType | None = ..., *, file: IO[str] | None = ..., depth: int | None = ...) -> None: ... + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = ..., *, file: IO[str] | None = ..., depth: int | None = ... + ) -> None: ... if sys.version_info >= (3, 11): def disassemble( diff --git a/mypy/typeshed/stdlib/email/__init__.pyi b/mypy/typeshed/stdlib/email/__init__.pyi index 4591b2c3340e..6b59dc73d5cc 100644 --- a/mypy/typeshed/stdlib/email/__init__.pyi +++ b/mypy/typeshed/stdlib/email/__init__.pyi @@ -9,7 +9,7 @@ _ParamType: TypeAlias = Union[str, tuple[str | None, str | None, str]] # noqa: _ParamsType: TypeAlias = Union[str, None, tuple[str, str | None, str]] # noqa: Y047 def message_from_string(s: str, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... -def message_from_bytes(s: bytes, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... +def message_from_bytes(s: bytes | bytearray, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_binary_file(fp: IO[bytes], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... 
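An illustrative sketch of what the bytearray-accepting `message_from_bytes` above allows; the message content is arbitrary example data.

import email

raw = bytearray(b"Subject: hello\r\n\r\nbody\r\n")   # a mutable buffer parses fine at runtime
msg = email.message_from_bytes(raw)
print(msg["Subject"])                                 # -> hello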
diff --git a/mypy/typeshed/stdlib/email/base64mime.pyi b/mypy/typeshed/stdlib/email/base64mime.pyi index e55658046f55..16118a879ad7 100644 --- a/mypy/typeshed/stdlib/email/base64mime.pyi +++ b/mypy/typeshed/stdlib/email/base64mime.pyi @@ -1,9 +1,13 @@ __all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] -def header_length(bytearray: str | bytes) -> int: ... -def header_encode(header_bytes: str | bytes, charset: str = ...) -> str: ... -def body_encode(s: bytes, maxlinelen: int = ..., eol: str = ...) -> str: ... -def decode(string: str | bytes) -> bytes: ... +from _typeshed import ReadableBuffer + +def header_length(bytearray: str | bytes | bytearray) -> int: ... +def header_encode(header_bytes: str | ReadableBuffer, charset: str = ...) -> str: ... + +# First argument should be a buffer that supports slicing and len(). +def body_encode(s: bytes | bytearray, maxlinelen: int = ..., eol: str = ...) -> str: ... +def decode(string: str | ReadableBuffer) -> bytes: ... body_decode = decode decodestring = decode diff --git a/mypy/typeshed/stdlib/email/feedparser.pyi b/mypy/typeshed/stdlib/email/feedparser.pyi index c535c353daad..809f0b0e112b 100644 --- a/mypy/typeshed/stdlib/email/feedparser.pyi +++ b/mypy/typeshed/stdlib/email/feedparser.pyi @@ -20,5 +20,5 @@ class BytesFeedParser(Generic[_MessageT]): def __init__(self: BytesFeedParser[Message], _factory: None = ..., *, policy: Policy = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... - def feed(self, data: bytes) -> None: ... + def feed(self, data: bytes | bytearray) -> None: ... def close(self) -> _MessageT: ... diff --git a/mypy/typeshed/stdlib/email/header.pyi b/mypy/typeshed/stdlib/email/header.pyi index 9248759168a9..58740bd1bdae 100644 --- a/mypy/typeshed/stdlib/email/header.pyi +++ b/mypy/typeshed/stdlib/email/header.pyi @@ -1,3 +1,4 @@ +from collections.abc import Iterable from email.charset import Charset from typing import Any @@ -6,14 +7,14 @@ __all__ = ["Header", "decode_header", "make_header"] class Header: def __init__( self, - s: bytes | str | None = ..., + s: bytes | bytearray | str | None = ..., charset: Charset | str | None = ..., maxlinelen: int | None = ..., header_name: str | None = ..., continuation_ws: str = ..., errors: str = ..., ) -> None: ... - def append(self, s: bytes | str, charset: Charset | str | None = ..., errors: str = ...) -> None: ... + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = ..., errors: str = ...) -> None: ... def encode(self, splitchars: str = ..., maxlinelen: int | None = ..., linesep: str = ...) -> str: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... @@ -23,7 +24,7 @@ class Header: # contains at least one encoded part. def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... 
def make_header( - decoded_seq: list[tuple[bytes, str | None]], + decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], maxlinelen: int | None = ..., header_name: str | None = ..., continuation_ws: str = ..., diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index 4e8f600f7ffd..3c59aeeb2d01 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -12,7 +12,7 @@ __all__ = ["Message", "EmailMessage"] _T = TypeVar("_T") -_PayloadType: TypeAlias = list[Message] | str | bytes +_PayloadType: TypeAlias = list[Message] | str | bytes | bytearray _CharsetType: TypeAlias = Charset | str | None _HeaderType: TypeAlias = Any diff --git a/mypy/typeshed/stdlib/email/mime/application.pyi b/mypy/typeshed/stdlib/email/mime/application.pyi index dfff85265ade..5ff60bff6ad2 100644 --- a/mypy/typeshed/stdlib/email/mime/application.pyi +++ b/mypy/typeshed/stdlib/email/mime/application.pyi @@ -8,7 +8,7 @@ __all__ = ["MIMEApplication"] class MIMEApplication(MIMENonMultipart): def __init__( self, - _data: str | bytes, + _data: str | bytes | bytearray, _subtype: str = ..., _encoder: Callable[[MIMEApplication], object] = ..., *, diff --git a/mypy/typeshed/stdlib/email/mime/audio.pyi b/mypy/typeshed/stdlib/email/mime/audio.pyi index b355d55070ad..05e173f5c4a1 100644 --- a/mypy/typeshed/stdlib/email/mime/audio.pyi +++ b/mypy/typeshed/stdlib/email/mime/audio.pyi @@ -8,7 +8,7 @@ __all__ = ["MIMEAudio"] class MIMEAudio(MIMENonMultipart): def __init__( self, - _audiodata: str | bytes, + _audiodata: str | bytes | bytearray, _subtype: str | None = ..., _encoder: Callable[[MIMEAudio], object] = ..., *, diff --git a/mypy/typeshed/stdlib/email/mime/image.pyi b/mypy/typeshed/stdlib/email/mime/image.pyi index f575103de2d6..7e46b835b541 100644 --- a/mypy/typeshed/stdlib/email/mime/image.pyi +++ b/mypy/typeshed/stdlib/email/mime/image.pyi @@ -8,7 +8,7 @@ __all__ = ["MIMEImage"] class MIMEImage(MIMENonMultipart): def __init__( self, - _imagedata: str | bytes, + _imagedata: str | bytes | bytearray, _subtype: str | None = ..., _encoder: Callable[[MIMEImage], object] = ..., *, diff --git a/mypy/typeshed/stdlib/email/parser.pyi b/mypy/typeshed/stdlib/email/parser.pyi index bf51c45728fd..1afd8940f4ef 100644 --- a/mypy/typeshed/stdlib/email/parser.pyi +++ b/mypy/typeshed/stdlib/email/parser.pyi @@ -16,6 +16,6 @@ class HeaderParser(Parser): ... class BytesParser: def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... - def parsebytes(self, text: bytes, headersonly: bool = ...) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = ...) -> Message: ... class BytesHeaderParser(BytesParser): ... diff --git a/mypy/typeshed/stdlib/email/quoprimime.pyi b/mypy/typeshed/stdlib/email/quoprimime.pyi index c5d324d17e13..ec0c799583bf 100644 --- a/mypy/typeshed/stdlib/email/quoprimime.pyi +++ b/mypy/typeshed/stdlib/email/quoprimime.pyi @@ -1,3 +1,5 @@ +from collections.abc import Iterable + __all__ = [ "body_decode", "body_encode", @@ -13,11 +15,11 @@ __all__ = [ def header_check(octet: int) -> bool: ... def body_check(octet: int) -> bool: ... -def header_length(bytearray: bytes) -> int: ... -def body_length(bytearray: bytes) -> int: ... -def unquote(s: str | bytes) -> str: ... -def quote(c: str | bytes) -> str: ... -def header_encode(header_bytes: bytes, charset: str = ...) -> str: ... 
+def header_length(bytearray: Iterable[int]) -> int: ... +def body_length(bytearray: Iterable[int]) -> int: ... +def unquote(s: str | bytes | bytearray) -> str: ... +def quote(c: str | bytes | bytearray) -> str: ... +def header_encode(header_bytes: bytes | bytearray, charset: str = ...) -> str: ... def body_encode(body: str, maxlinelen: int = ..., eol: str = ...) -> str: ... def decode(encoded: str, eol: str = ...) -> str: ... def header_decode(s: str) -> str: ... diff --git a/mypy/typeshed/stdlib/encodings/__init__.pyi b/mypy/typeshed/stdlib/encodings/__init__.pyi index d86466762268..2e83f0f65a71 100644 --- a/mypy/typeshed/stdlib/encodings/__init__.pyi +++ b/mypy/typeshed/stdlib/encodings/__init__.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from codecs import CodecInfo -from typing import Any class CodecRegistryError(LookupError, SystemError): ... @@ -7,4 +7,4 @@ def normalize_encoding(encoding: str | bytes) -> str: ... def search_function(encoding: str) -> CodecInfo | None: ... # Needed for submodules -def __getattr__(name: str) -> Any: ... # incomplete +def __getattr__(name: str) -> Incomplete: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8.pyi b/mypy/typeshed/stdlib/encodings/utf_8.pyi index 568fa6013373..8e73756199c1 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8.pyi @@ -1,11 +1,12 @@ import codecs +from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: str, final: bool = ...) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): @staticmethod - def _buffer_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def _buffer_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): @staticmethod @@ -13,8 +14,8 @@ class StreamWriter(codecs.StreamWriter): class StreamReader(codecs.StreamReader): @staticmethod - def decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def decode(__data: ReadableBuffer, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... +def decode(input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi index ad0d5bdc4fc7..27171063f53f 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -1,4 +1,5 @@ import codecs +from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): def __init__(self, errors: str = ...) -> None: ... @@ -8,14 +9,14 @@ class IncrementalEncoder(codecs.IncrementalEncoder): class IncrementalDecoder(codecs.BufferedIncrementalDecoder): def __init__(self, errors: str = ...) -> None: ... - def _buffer_decode(self, input: bytes, errors: str | None, final: bool) -> tuple[str, int]: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): def encode(self, input: str, errors: str | None = ...) -> tuple[bytes, int]: ... class StreamReader(codecs.StreamReader): - def decode(self, input: bytes, errors: str | None = ...) 
-> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... def encode(input: str, errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... +def decode(input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/fcntl.pyi b/mypy/typeshed/stdlib/fcntl.pyi index 69863bf580fa..2df16083c0b7 100644 --- a/mypy/typeshed/stdlib/fcntl.pyi +++ b/mypy/typeshed/stdlib/fcntl.pyi @@ -103,7 +103,7 @@ if sys.platform != "win32": @overload def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = ...) -> int: ... @overload - def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: bytes) -> bytes: ... + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: str | ReadOnlyBuffer) -> bytes: ... @overload def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = ..., __mutate_flag: bool = ...) -> int: ... @overload diff --git a/mypy/typeshed/stdlib/gzip.pyi b/mypy/typeshed/stdlib/gzip.pyi index 75a70a5e7a07..580e605b6b38 100644 --- a/mypy/typeshed/stdlib/gzip.pyi +++ b/mypy/typeshed/stdlib/gzip.pyi @@ -1,9 +1,9 @@ import _compression import sys import zlib -from _typeshed import ReadableBuffer, StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath, _BufferWithLen from io import FileIO -from typing import Any, Protocol, TextIO, overload +from typing import Protocol, TextIO, overload from typing_extensions import Literal, TypeAlias if sys.version_info >= (3, 8): @@ -26,15 +26,15 @@ FCOMMENT: int # actually Literal[16] # undocumented class _ReadableFileobj(Protocol): def read(self, __n: int) -> bytes: ... - def seek(self, __n: int) -> Any: ... + def seek(self, __n: int) -> object: ... # The following attributes and methods are optional: # name: str # mode: str # def fileno() -> int: ... class _WritableFileobj(Protocol): - def write(self, __b: bytes) -> Any: ... - def flush(self) -> Any: ... + def write(self, __b: bytes) -> object: ... + def flush(self) -> object: ... # The following attributes and methods are optional: # name: str # mode: str @@ -159,9 +159,9 @@ class _GzipReader(_compression.DecompressReader): def __init__(self, fp: _ReadableFileobj) -> None: ... if sys.version_info >= (3, 8): - def compress(data: bytes, compresslevel: int = ..., *, mtime: float | None = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = ..., *, mtime: float | None = ...) -> bytes: ... else: - def compress(data: bytes, compresslevel: int = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = ...) -> bytes: ... -def decompress(data: bytes) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... diff --git a/mypy/typeshed/stdlib/hmac.pyi b/mypy/typeshed/stdlib/hmac.pyi index af69fc7ea46d..dc29836b6b87 100644 --- a/mypy/typeshed/stdlib/hmac.pyi +++ b/mypy/typeshed/stdlib/hmac.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, _BufferWithLen from collections.abc import Callable from types import ModuleType from typing import Any, AnyStr, overload @@ -18,19 +18,19 @@ if sys.version_info >= (3, 8): # In reality digestmod has a default value, but the function always throws an error # if the argument is not given, so we pretend it is a required argument. @overload - def new(key: bytes, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... 
+ def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... @overload - def new(key: bytes, *, digestmod: _DigestMod) -> HMAC: ... + def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... else: - def new(key: bytes, msg: ReadableBuffer | None = ..., digestmod: _DigestMod | None = ...) -> HMAC: ... + def new(key: bytes | bytearray, msg: ReadableBuffer | None = ..., digestmod: _DigestMod | None = ...) -> HMAC: ... class HMAC: digest_size: int block_size: int @property def name(self) -> str: ... - def __init__(self, key: bytes, msg: ReadableBuffer | None = ..., digestmod: _DigestMod = ...) -> None: ... + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = ..., digestmod: _DigestMod = ...) -> None: ... def update(self, msg: ReadableBuffer) -> None: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... @@ -40,4 +40,4 @@ class HMAC: def compare_digest(__a: ReadableBuffer, __b: ReadableBuffer) -> bool: ... @overload def compare_digest(__a: AnyStr, __b: AnyStr) -> bool: ... -def digest(key: bytes, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... +def digest(key: _BufferWithLen, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index 2ce52eac9ad9..ad794ed9b073 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -2,10 +2,10 @@ import email.message import io import ssl import types -from _typeshed import Self, WriteableBuffer +from _typeshed import ReadableBuffer, Self, SupportsRead, WriteableBuffer from collections.abc import Callable, Iterable, Iterator, Mapping from socket import socket -from typing import IO, Any, BinaryIO, TypeVar, overload +from typing import Any, BinaryIO, TypeVar, overload from typing_extensions import TypeAlias __all__ = [ @@ -30,7 +30,7 @@ __all__ = [ "HTTPSConnection", ] -_DataType: TypeAlias = bytes | IO[Any] | Iterable[bytes] | str +_DataType: TypeAlias = SupportsRead[bytes] | Iterable[ReadableBuffer] | ReadableBuffer _T = TypeVar("_T") HTTP_PORT: int @@ -164,7 +164,7 @@ class HTTPConnection: def putrequest(self, method: str, url: str, skip_host: bool = ..., skip_accept_encoding: bool = ...) -> None: ... def putheader(self, header: str, *argument: str) -> None: ... def endheaders(self, message_body: _DataType | None = ..., *, encode_chunked: bool = ...) -> None: ... - def send(self, data: _DataType) -> None: ... + def send(self, data: _DataType | str) -> None: ... class HTTPSConnection(HTTPConnection): # Can be `None` if `.connect()` was not called: diff --git a/mypy/typeshed/stdlib/http/server.pyi b/mypy/typeshed/stdlib/http/server.pyi index 40c94bf62f30..011d464b4653 100644 --- a/mypy/typeshed/stdlib/http/server.pyi +++ b/mypy/typeshed/stdlib/http/server.pyi @@ -1,6 +1,7 @@ import email.message import io import socketserver +import sys from _typeshed import StrPath, SupportsRead, SupportsWrite from collections.abc import Mapping, Sequence from typing import Any, AnyStr, BinaryIO, ClassVar @@ -31,7 +32,6 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): default_request_version: str # undocumented weekdayname: ClassVar[Sequence[str]] # undocumented monthname: ClassVar[Sequence[str | None]] # undocumented - def __init__(self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer) -> None: ... def handle_one_request(self) -> None: ... def handle_expect_100(self) -> bool: ... 
def send_error(self, code: int, message: str | None = ..., explain: str | None = ...) -> None: ... @@ -51,9 +51,26 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): extensions_map: dict[str, str] - def __init__( - self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer, directory: str | None = ... - ) -> None: ... + if sys.version_info >= (3, 12): + def __init__( + self, + request: socketserver._RequestType, + client_address: socketserver._AddressType, + server: socketserver.BaseServer, + *, + directory: str | None = ..., + index_pages: Sequence[str] | None = ..., + ) -> None: ... + else: + def __init__( + self, + request: socketserver._RequestType, + client_address: socketserver._AddressType, + server: socketserver.BaseServer, + *, + directory: str | None = ..., + ) -> None: ... + def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi index bd3d0777db15..f13e1c9b656c 100644 --- a/mypy/typeshed/stdlib/imaplib.pyi +++ b/mypy/typeshed/stdlib/imaplib.pyi @@ -1,7 +1,7 @@ import subprocess import sys import time -from _typeshed import Self +from _typeshed import ReadableBuffer, Self, _BufferWithLen from builtins import list as _list # conflicts with a method named "list" from collections.abc import Callable from datetime import datetime @@ -9,7 +9,7 @@ from re import Pattern from socket import socket as _socket from ssl import SSLContext, SSLSocket from types import TracebackType -from typing import IO, Any +from typing import IO, Any, SupportsAbs, SupportsInt from typing_extensions import Literal, TypeAlias __all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] @@ -54,12 +54,12 @@ class IMAP4: file: IO[str] | IO[bytes] def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... - def send(self, data: bytes) -> None: ... + def send(self, data: ReadableBuffer) -> None: ... def shutdown(self) -> None: ... def socket(self) -> _socket: ... def recent(self) -> _CommandResults: ... def response(self, code: str) -> _CommandResults: ... - def append(self, mailbox: str, flags: str, date_time: str, message: bytes) -> str: ... + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... def capability(self) -> _CommandResults: ... def check(self) -> _CommandResults: ... @@ -151,13 +151,13 @@ class IMAP4_stream(IMAP4): def open(self, host: str | None = ..., port: int | None = ...) -> None: ... class _Authenticator: - mech: Callable[[bytes], bytes] - def __init__(self, mechinst: Callable[[bytes], bytes]) -> None: ... + mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] + def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... def process(self, data: str) -> str: ... - def encode(self, inp: bytes) -> str: ... - def decode(self, inp: str) -> bytes: ... + def encode(self, inp: bytes | bytearray | memoryview) -> str: ... + def decode(self, inp: str | _BufferWithLen) -> bytes: ... -def Internaldate2tuple(resp: bytes) -> time.struct_time: ... -def Int2AP(num: int) -> str: ... -def ParseFlags(resp: bytes) -> tuple[bytes, ...]: ... 
+def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: ... +def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: ... +def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: ... def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: ... diff --git a/mypy/typeshed/stdlib/imp.pyi b/mypy/typeshed/stdlib/imp.pyi index 3054a4465f99..889f0cac4f9f 100644 --- a/mypy/typeshed/stdlib/imp.pyi +++ b/mypy/typeshed/stdlib/imp.pyi @@ -1,9 +1,4 @@ import types -from _typeshed import StrPath -from os import PathLike -from types import TracebackType -from typing import IO, Any, Protocol - from _imp import ( acquire_lock as acquire_lock, create_dynamic as create_dynamic, @@ -15,6 +10,10 @@ from _imp import ( lock_held as lock_held, release_lock as release_lock, ) +from _typeshed import StrPath +from os import PathLike +from types import TracebackType +from typing import IO, Any, Protocol SEARCH_ERROR: int PY_SOURCE: int diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index 708037305c67..c961fb2e1f9e 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -1,12 +1,19 @@ import sys import types -from _typeshed import OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode +from _typeshed import ( + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, +) from abc import ABCMeta, abstractmethod from collections.abc import Iterator, Mapping, Sequence from importlib.machinery import ModuleSpec from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from typing import IO, Any, BinaryIO, NoReturn, Protocol, overload, runtime_checkable -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal if sys.version_info >= (3, 11): __all__ = [ @@ -24,8 +31,6 @@ if sys.version_info >= (3, 11): "TraversableResources", ] -_Path: TypeAlias = bytes | str - class Finder(metaclass=ABCMeta): ... class Loader(metaclass=ABCMeta): @@ -38,7 +43,7 @@ class Loader(metaclass=ABCMeta): class ResourceLoader(Loader): @abstractmethod - def get_data(self, path: _Path) -> bytes: ... + def get_data(self, path: str) -> bytes: ... class InspectLoader(Loader): def is_package(self, fullname: str) -> bool: ... @@ -47,40 +52,40 @@ class InspectLoader(Loader): def get_source(self, fullname: str) -> str | None: ... def exec_module(self, module: types.ModuleType) -> None: ... @staticmethod - def source_to_code(data: bytes | str, path: str = ...) -> types.CodeType: ... + def source_to_code(data: ReadableBuffer | str, path: str = ...) -> types.CodeType: ... class ExecutionLoader(InspectLoader): @abstractmethod - def get_filename(self, fullname: str) -> _Path: ... + def get_filename(self, fullname: str) -> str: ... class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): - def path_mtime(self, path: _Path) -> float: ... - def set_data(self, path: _Path, data: bytes) -> None: ... + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... def get_source(self, fullname: str) -> str | None: ... - def path_stats(self, path: _Path) -> Mapping[str, Any]: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... 
# Please keep in sync with sys._MetaPathFinder class MetaPathFinder(Finder): - def find_module(self, fullname: str, path: Sequence[_Path] | None) -> Loader | None: ... + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec( - self, fullname: str, path: Sequence[_Path] | None, target: types.ModuleType | None = ... + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... class PathEntryFinder(Finder): def find_module(self, fullname: str) -> Loader | None: ... - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[_Path]]: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): name: str - path: _Path - def __init__(self, fullname: str, path: _Path) -> None: ... - def get_data(self, path: _Path) -> bytes: ... - def get_filename(self, name: str | None = ...) -> _Path: ... + path: str + def __init__(self, fullname: str, path: str) -> None: ... + def get_data(self, path: str) -> bytes: ... + def get_filename(self, name: str | None = ...) -> str: ... def load_module(self, name: str | None = ...) -> types.ModuleType: ... class ResourceReader(metaclass=ABCMeta): @@ -174,6 +179,7 @@ if sys.version_info >= (3, 9): self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... ) -> IO[Any]: ... @property + @abstractmethod def name(self) -> str: ... @abstractmethod def __truediv__(self, child: str) -> Traversable: ... diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi index ba6ed30629e0..6e253521bc0f 100644 --- a/mypy/typeshed/stdlib/importlib/machinery.pyi +++ b/mypy/typeshed/stdlib/importlib/machinery.pyi @@ -1,6 +1,7 @@ import importlib.abc import sys import types +from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable, Sequence from typing import Any @@ -31,10 +32,10 @@ class ModuleSpec: class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... # InspectLoader @classmethod @@ -62,10 +63,10 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... 
@classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... # InspectLoader @classmethod @@ -91,10 +92,10 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): class WindowsRegistryFinder(importlib.abc.MetaPathFinder): @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... class PathFinder: @@ -113,10 +114,10 @@ class PathFinder: @classmethod def find_spec( - cls, fullname: str, path: Sequence[bytes | str] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... @classmethod - def find_module(cls, fullname: str, path: Sequence[bytes | str] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... SOURCE_SUFFIXES: list[str] DEBUG_BYTECODE_SUFFIXES: list[str] @@ -135,13 +136,13 @@ class FileFinder(importlib.abc.PathEntryFinder): ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): - def set_data(self, path: importlib.abc._Path, data: bytes, *, _mode: int = ...) -> None: ... + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = ...) -> None: ... class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class ExtensionFileLoader(importlib.abc.ExecutionLoader): - def __init__(self, name: str, path: importlib.abc._Path) -> None: ... - def get_filename(self, name: str | None = ...) -> importlib.abc._Path: ... + def __init__(self, name: str, path: str) -> None: ... + def get_filename(self, name: str | None = ...) -> str: ... def get_source(self, fullname: str) -> None: ... def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... def exec_module(self, module: types.ModuleType) -> None: ... diff --git a/mypy/typeshed/stdlib/importlib/util.pyi b/mypy/typeshed/stdlib/importlib/util.pyi index 4d75032ab44a..e9c08aeccf87 100644 --- a/mypy/typeshed/stdlib/importlib/util.pyi +++ b/mypy/typeshed/stdlib/importlib/util.pyi @@ -1,7 +1,7 @@ import importlib.abc import importlib.machinery import types -from _typeshed import StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable from typing import Any from typing_extensions import ParamSpec @@ -17,7 +17,7 @@ MAGIC_NUMBER: bytes def cache_from_source(path: str, debug_override: bool | None = ..., *, optimization: Any | None = ...) -> str: ... def source_from_cache(path: str) -> str: ... -def decode_source(source_bytes: bytes) -> str: ... +def decode_source(source_bytes: ReadableBuffer) -> str: ... def find_spec(name: str, package: str | None = ...) -> importlib.machinery.ModuleSpec | None: ... 
def spec_from_loader( name: str, loader: importlib.abc.Loader | None, *, origin: str | None = ..., is_package: bool | None = ... @@ -37,4 +37,4 @@ class LazyLoader(importlib.abc.Loader): def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ... def exec_module(self, module: types.ModuleType) -> None: ... -def source_hash(source_bytes: bytes) -> int: ... +def source_hash(source_bytes: ReadableBuffer) -> int: ... diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi index b97bc601271a..ad68aa93c894 100644 --- a/mypy/typeshed/stdlib/inspect.pyi +++ b/mypy/typeshed/stdlib/inspect.pyi @@ -585,7 +585,7 @@ _Object: TypeAlias = object class Attribute(NamedTuple): name: str - kind: str + kind: Literal["class method", "static method", "property", "method", "data"] defining_class: type object: _Object diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index 3e9a6cd6861d..9c4c769fe34b 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -103,7 +103,7 @@ class FileIO(RawIOBase, BinaryIO): def __enter__(self: Self) -> Self: ... class BytesIO(BufferedIOBase, BinaryIO): - def __init__(self, initial_bytes: bytes = ...) -> None: ... + def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. @@ -190,7 +190,7 @@ class StringIO(TextIOWrapper): class IncrementalNewlineDecoder(codecs.IncrementalDecoder): def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ... - def decode(self, input: bytes | str, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer | str, final: bool = ...) -> str: ... @property def newlines(self) -> str | tuple[str, ...] | None: ... def setstate(self, __state: tuple[bytes, int]) -> None: ... diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index 2c0292d6fbae..6580ba4f1ac4 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -18,7 +18,9 @@ def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... def ip_network( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = ... ) -> IPv4Network | IPv6Network: ... -def ip_interface(address: _RawIPAddress | _RawNetworkPart) -> IPv4Interface | IPv6Interface: ... +def ip_interface( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int] +) -> IPv4Interface | IPv6Interface: ... class _IPAddressBase: @property diff --git a/mypy/typeshed/stdlib/json/__init__.pyi b/mypy/typeshed/stdlib/json/__init__.pyi index 2fd87622e1fe..64ab8a11a45d 100644 --- a/mypy/typeshed/stdlib/json/__init__.pyi +++ b/mypy/typeshed/stdlib/json/__init__.pyi @@ -37,7 +37,7 @@ def dump( **kwds: Any, ) -> None: ... def loads( - s: str | bytes, + s: str | bytes | bytearray, *, cls: type[JSONDecoder] | None = ..., object_hook: Callable[[dict[Any, Any]], Any] | None = ..., @@ -58,4 +58,4 @@ def load( object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., **kwds: Any, ) -> Any: ... -def detect_encoding(b: bytes) -> str: ... # undocumented +def detect_encoding(b: bytes | bytearray) -> str: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index eec4ed96953a..f01c67d13fe9 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -2,7 +2,7 @@ import datetime import http.client import ssl import sys -from _typeshed import StrPath +from _typeshed import ReadableBuffer, StrPath from collections.abc import Callable from logging import FileHandler, Handler, LogRecord from queue import Queue, SimpleQueue @@ -125,7 +125,7 @@ class SocketHandler(Handler): def __init__(self, host: str, port: int | None) -> None: ... def makeSocket(self, timeout: float = ...) -> socket: ... # timeout is undocumented def makePickle(self, record: LogRecord) -> bytes: ... - def send(self, s: bytes) -> None: ... + def send(self, s: ReadableBuffer) -> None: ... def createSocket(self) -> None: ... class DatagramHandler(SocketHandler): diff --git a/mypy/typeshed/stdlib/lzma.pyi b/mypy/typeshed/stdlib/lzma.pyi index 868da0f05567..9d75c627f76d 100644 --- a/mypy/typeshed/stdlib/lzma.pyi +++ b/mypy/typeshed/stdlib/lzma.pyi @@ -83,7 +83,7 @@ PRESET_EXTREME: int # v big number @final class LZMADecompressor: def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ... - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... @property def check(self) -> int: ... @property @@ -99,7 +99,7 @@ class LZMACompressor: def __init__( self, format: int | None = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... ) -> None: ... - def compress(self, __data: bytes) -> bytes: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... def flush(self) -> bytes: ... class LZMAError(Exception): ... @@ -189,7 +189,9 @@ def open( newline: str | None = ..., ) -> LZMAFile | TextIO: ... def compress( - data: bytes, format: int = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... + data: ReadableBuffer, format: int = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... +) -> bytes: ... +def decompress( + data: ReadableBuffer, format: int = ..., memlimit: int | None = ..., filters: _FilterChain | None = ... ) -> bytes: ... -def decompress(data: bytes, format: int = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> bytes: ... def is_check_supported(__check_id: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/mailbox.pyi b/mypy/typeshed/stdlib/mailbox.pyi index 3169e8cfa689..29cea5cadbb0 100644 --- a/mypy/typeshed/stdlib/mailbox.pyi +++ b/mypy/typeshed/stdlib/mailbox.pyi @@ -1,6 +1,7 @@ import email.message +import io import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import Self, StrPath, SupportsNoArgReadline, SupportsRead from abc import ABCMeta, abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from types import TracebackType @@ -32,7 +33,10 @@ __all__ = [ _T = TypeVar("_T") _MessageT = TypeVar("_MessageT", bound=Message) -_MessageData: TypeAlias = email.message.Message | bytes | str | IO[str] | IO[bytes] + +class _SupportsReadAndReadline(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... 
+ +_MessageData: TypeAlias = email.message.Message | bytes | str | io.StringIO | _SupportsReadAndReadline class _HasIteritems(Protocol): def iteritems(self) -> Iterator[tuple[str, _MessageData]]: ... @@ -43,13 +47,12 @@ class _HasItems(Protocol): linesep: bytes class Mailbox(Generic[_MessageT]): - - _path: bytes | str # undocumented + _path: str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented @overload - def __init__(self, path: StrOrBytesPath, factory: Callable[[IO[Any]], _MessageT], create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = ...) -> None: ... @overload - def __init__(self, path: StrOrBytesPath, factory: None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: None = ..., create: bool = ...) -> None: ... @abstractmethod def add(self, message: _MessageData) -> str: ... @abstractmethod @@ -105,7 +108,7 @@ class Maildir(Mailbox[MaildirMessage]): colon: str def __init__( - self, dirname: StrOrBytesPath, factory: Callable[[IO[Any]], MaildirMessage] | None = ..., create: bool = ... + self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = ..., create: bool = ... ) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... @@ -146,19 +149,13 @@ class _mboxMMDF(_singlefileMailbox[_MessageT]): def get_string(self, key: str, from_: bool = ...) -> str: ... class mbox(_mboxMMDF[mboxMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], mboxMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = ..., create: bool = ...) -> None: ... class MMDF(_mboxMMDF[MMDFMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], MMDFMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = ..., create: bool = ...) -> None: ... class MH(Mailbox[MHMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], MHMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = ..., create: bool = ...) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... def __setitem__(self, key: str, message: _MessageData) -> None: ... @@ -173,17 +170,15 @@ class MH(Mailbox[MHMessage]): def unlock(self) -> None: ... def close(self) -> None: ... def list_folders(self) -> list[str]: ... - def get_folder(self, folder: StrOrBytesPath) -> MH: ... - def add_folder(self, folder: StrOrBytesPath) -> MH: ... - def remove_folder(self, folder: StrOrBytesPath) -> None: ... + def get_folder(self, folder: StrPath) -> MH: ... + def add_folder(self, folder: StrPath) -> MH: ... + def remove_folder(self, folder: StrPath) -> None: ... def get_sequences(self) -> dict[str, list[int]]: ... def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... def pack(self) -> None: ... class Babyl(_singlefileMailbox[BabylMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], BabylMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = ..., create: bool = ...) -> None: ... def get_message(self, key: str) -> BabylMessage: ... 
def get_bytes(self, key: str) -> bytes: ... def get_file(self, key: str) -> IO[bytes]: ... diff --git a/mypy/typeshed/stdlib/marshal.pyi b/mypy/typeshed/stdlib/marshal.pyi index b2fde674a647..d68cdd143109 100644 --- a/mypy/typeshed/stdlib/marshal.pyi +++ b/mypy/typeshed/stdlib/marshal.pyi @@ -1,8 +1,9 @@ -from typing import IO, Any +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from typing import Any version: int -def dump(__value: Any, __file: IO[Any], __version: int = ...) -> None: ... -def load(__file: IO[Any]) -> Any: ... +def dump(__value: Any, __file: SupportsWrite[bytes], __version: int = ...) -> None: ... +def load(__file: SupportsRead[bytes]) -> Any: ... def dumps(__value: Any, __version: int = ...) -> bytes: ... -def loads(__bytes: bytes) -> Any: ... +def loads(__bytes: ReadableBuffer) -> Any: ... diff --git a/mypy/typeshed/stdlib/math.pyi b/mypy/typeshed/stdlib/math.pyi index 58eda98d8977..ca30acd7e97d 100644 --- a/mypy/typeshed/stdlib/math.pyi +++ b/mypy/typeshed/stdlib/math.pyi @@ -1,9 +1,11 @@ import sys -from _typeshed import SupportsTrunc from collections.abc import Iterable -from typing import SupportsFloat, overload +from typing import Protocol, SupportsFloat, TypeVar, overload from typing_extensions import SupportsIndex, TypeAlias +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + if sys.version_info >= (3, 8): _SupportsFloatOrIndex: TypeAlias = SupportsFloat | SupportsIndex else: @@ -26,6 +28,12 @@ def atanh(__x: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 11): def cbrt(__x: _SupportsFloatOrIndex) -> float: ... +class _SupportsCeil(Protocol[_T_co]): + def __ceil__(self) -> _T_co: ... + +@overload +def ceil(__x: _SupportsCeil[_T]) -> _T: ... +@overload def ceil(__x: _SupportsFloatOrIndex) -> int: ... if sys.version_info >= (3, 8): @@ -55,6 +63,12 @@ if sys.version_info >= (3, 8): else: def factorial(__x: int) -> int: ... +class _SupportsFloor(Protocol[_T_co]): + def __floor__(self) -> _T_co: ... + +@overload +def floor(__x: _SupportsFloor[_T]) -> _T: ... +@overload def floor(__x: _SupportsFloatOrIndex) -> int: ... def fmod(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... def frexp(__x: _SupportsFloatOrIndex) -> tuple[float, int]: ... @@ -119,7 +133,12 @@ def sinh(__x: _SupportsFloatOrIndex) -> float: ... def sqrt(__x: _SupportsFloatOrIndex) -> float: ... def tan(__x: _SupportsFloatOrIndex) -> float: ... def tanh(__x: _SupportsFloatOrIndex) -> float: ... -def trunc(__x: SupportsTrunc) -> int: ... + +# Is different from `_typeshed.SupportsTrunc`, which is not generic +class _SupportsTrunc(Protocol[_T_co]): + def __trunc__(self) -> _T_co: ... + +def trunc(__x: _SupportsTrunc[_T]) -> _T: ... if sys.version_info >= (3, 9): def ulp(__x: _SupportsFloatOrIndex) -> float: ... diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 8dbec2388838..30084b85bc51 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -67,8 +67,11 @@ class mmap(Iterable[int], Sized): def __setitem__(self, __index: int, __object: int) -> None: ... @overload def __setitem__(self, __index: slice, __object: ReadableBuffer) -> None: ... - # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and - # __len__, so we claim that there is also an __iter__ to help type checkers. + # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, + # so we claim that there is also a __contains__ to help type checkers. 
+ def __contains__(self, __o: object) -> bool: ... + # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and __len__, + # so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[int]: ... def __enter__(self: Self) -> Self: ... def __exit__(self, *args: object) -> None: ... diff --git a/mypy/typeshed/stdlib/msvcrt.pyi b/mypy/typeshed/stdlib/msvcrt.pyi index 0bea8ce22b06..5849b9b00ca0 100644 --- a/mypy/typeshed/stdlib/msvcrt.pyi +++ b/mypy/typeshed/stdlib/msvcrt.pyi @@ -21,8 +21,8 @@ if sys.platform == "win32": def getwch() -> str: ... def getche() -> bytes: ... def getwche() -> str: ... - def putch(__char: bytes) -> None: ... + def putch(__char: bytes | bytearray) -> None: ... def putwch(__unicode_char: str) -> None: ... - def ungetch(__char: bytes) -> None: ... + def ungetch(__char: bytes | bytearray) -> None: ... def ungetwch(__unicode_char: str) -> None: ... def heapmin() -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/connection.pyi index cc9f5cf8f890..5036f0ef222b 100644 --- a/mypy/typeshed/stdlib/multiprocessing/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/connection.pyi @@ -1,7 +1,7 @@ import socket import sys import types -from _typeshed import Self +from _typeshed import ReadableBuffer, Self from collections.abc import Iterable from typing import Any, Union from typing_extensions import SupportsIndex, TypeAlias @@ -21,7 +21,7 @@ class _ConnectionBase: def writable(self) -> bool: ... # undocumented def fileno(self) -> int: ... def close(self) -> None: ... - def send_bytes(self, buf: bytes, offset: int = ..., size: int | None = ...) -> None: ... + def send_bytes(self, buf: ReadableBuffer, offset: int = ..., size: int | None = ...) -> None: ... def send(self, obj: Any) -> None: ... def recv_bytes(self, maxlength: int | None = ...) -> bytes: ... def recv_bytes_into(self, buf: Any, offset: int = ...) -> int: ... diff --git a/mypy/typeshed/stdlib/operator.pyi b/mypy/typeshed/stdlib/operator.pyi index c3fc4b0a8503..a0e5df7977da 100644 --- a/mypy/typeshed/stdlib/operator.pyi +++ b/mypy/typeshed/stdlib/operator.pyi @@ -1,5 +1,4 @@ import sys - from _operator import * __all__ = [ diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index 6f51d4e7aa50..590d20576665 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -503,11 +503,14 @@ if sys.platform != "win32": def getenvb(key: bytes) -> bytes | None: ... @overload def getenvb(key: bytes, default: _T) -> bytes | _T: ... + def putenv(__name: StrOrBytesPath, __value: StrOrBytesPath) -> None: ... + def unsetenv(__name: StrOrBytesPath) -> None: ... -def putenv(__name: bytes | str, __value: bytes | str) -> None: ... +else: + def putenv(__name: str, __value: str) -> None: ... -if sys.platform != "win32" or sys.version_info >= (3, 9): - def unsetenv(__name: bytes | str) -> None: ... + if sys.version_info >= (3, 9): + def unsetenv(__name: str) -> None: ... _Opener: TypeAlias = Callable[[str, int], int] @@ -622,7 +625,7 @@ if sys.platform != "win32": def posix_fadvise(__fd: int, __offset: int, __length: int, __advice: int) -> None: ... def pread(__fd: int, __length: int, __offset: int) -> bytes: ... - def pwrite(__fd: int, __buffer: bytes, __offset: int) -> int: ... + def pwrite(__fd: int, __buffer: ReadableBuffer, __offset: int) -> int: ... 
# In CI, stubtest sometimes reports that these are available on MacOS, sometimes not def preadv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer], __offset: int, __flags: int = ...) -> int: ... def pwritev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer], __offset: int, __flags: int = ...) -> int: ... @@ -641,8 +644,8 @@ if sys.platform != "win32": in_fd: int, offset: int, count: int, - headers: Sequence[bytes] = ..., - trailers: Sequence[bytes] = ..., + headers: Sequence[ReadableBuffer] = ..., + trailers: Sequence[ReadableBuffer] = ..., flags: int = ..., ) -> int: ... # FreeBSD and Mac OS X only def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ... @@ -671,7 +674,7 @@ if sys.platform != "win32": def tcsetpgrp(__fd: int, __pgid: int) -> None: ... def ttyname(__fd: int) -> str: ... -def write(__fd: int, __data: bytes) -> int: ... +def write(__fd: int, __data: ReadableBuffer) -> int: ... def access( path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., effective_ids: bool = ..., follow_symlinks: bool = ... ) -> bool: ... @@ -775,14 +778,19 @@ if sys.platform != "win32": ) -> Iterator[tuple[str, list[str], list[str], int]]: ... @overload def fwalk( - top: bytes, topdown: bool = ..., onerror: _OnError | None = ..., *, follow_symlinks: bool = ..., dir_fd: int | None = ... + top: BytesPath, + topdown: bool = ..., + onerror: _OnError | None = ..., + *, + follow_symlinks: bool = ..., + dir_fd: int | None = ..., ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... if sys.platform == "linux": def getxattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... def listxattr(path: _FdOrAnyPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... def removexattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... def setxattr( - path: _FdOrAnyPath, attribute: StrOrBytesPath, value: bytes, flags: int = ..., *, follow_symlinks: bool = ... + path: _FdOrAnyPath, attribute: StrOrBytesPath, value: ReadableBuffer, flags: int = ..., *, follow_symlinks: bool = ... ) -> None: ... def abort() -> NoReturn: ... @@ -810,6 +818,10 @@ _ExecVArgs: TypeAlias = ( | list[str | PathLike[Any]] | list[bytes | str | PathLike[Any]] ) +# Depending on the OS, the keys and values are passed either to +# PyUnicode_FSDecoder (which accepts str | ReadableBuffer) or to +# PyUnicode_FSConverter (which accepts StrOrBytesPath). For simplicity, +# we limit to str | bytes. _ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] def execv(__path: StrOrBytesPath, __argv: _ExecVArgs) -> NoReturn: ... diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 2a0f1760cae5..79c2352a0f85 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -5,10 +5,12 @@ from _typeshed import ( OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode, + ReadableBuffer, Self, + StrOrBytesPath, StrPath, ) -from collections.abc import Generator, Sequence +from collections.abc import Callable, Generator, Iterator, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from os import PathLike, stat_result from types import TracebackType @@ -188,16 +190,20 @@ class Path(PurePath): def expanduser(self: Self) -> Self: ... def read_bytes(self) -> bytes: ... def read_text(self, encoding: str | None = ..., errors: str | None = ...) -> str: ... 
- def samefile(self, other_path: str | bytes | int | Path) -> bool: ... - def write_bytes(self, data: bytes) -> int: ... + def samefile(self, other_path: StrPath) -> bool: ... + def write_bytes(self, data: ReadableBuffer) -> int: ... if sys.version_info >= (3, 10): def write_text( self, data: str, encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... ) -> int: ... else: def write_text(self, data: str, encoding: str | None = ..., errors: str | None = ...) -> int: ... - if sys.version_info >= (3, 8): - def link_to(self, target: StrPath | bytes) -> None: ... + if sys.version_info >= (3, 8) and sys.version_info < (3, 12): + def link_to(self, target: StrOrBytesPath) -> None: ... + if sys.version_info >= (3, 12): + def walk( + self: Self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ... + ) -> Iterator[tuple[Self, list[str], list[str]]]: ... class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... diff --git a/mypy/typeshed/stdlib/pickle.pyi b/mypy/typeshed/stdlib/pickle.pyi index 9a94e9eced3c..f393452069a3 100644 --- a/mypy/typeshed/stdlib/pickle.pyi +++ b/mypy/typeshed/stdlib/pickle.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping from typing import Any, ClassVar, Protocol, SupportsBytes, Union from typing_extensions import SupportsIndex, TypeAlias, final @@ -97,9 +97,6 @@ class _ReadableFileobj(Protocol): def read(self, __n: int) -> bytes: ... def readline(self) -> bytes: ... -class _WritableFileobj(Protocol): - def write(self, __b: bytes) -> Any: ... - if sys.version_info >= (3, 8): @final class PickleBuffer: @@ -109,7 +106,7 @@ if sys.version_info >= (3, 8): _BufferCallback: TypeAlias = Callable[[PickleBuffer], Any] | None def dump( obj: Any, - file: _WritableFileobj, + file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ..., @@ -136,7 +133,7 @@ if sys.version_info >= (3, 8): ) -> Any: ... else: - def dump(obj: Any, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... def dumps(obj: Any, protocol: int | None = ..., *, fix_imports: bool = ...) -> bytes: ... def load(file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... def loads(data: ReadableBuffer, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... @@ -162,7 +159,7 @@ class Pickler: if sys.version_info >= (3, 8): def __init__( self, - file: _WritableFileobj, + file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ..., @@ -170,7 +167,7 @@ class Pickler: ) -> None: ... def reducer_override(self, obj: Any) -> Any: ... else: - def __init__(self, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... def dump(self, __obj: Any) -> None: ... def clear_memo(self) -> None: ... 
diff --git a/mypy/typeshed/stdlib/pickletools.pyi b/mypy/typeshed/stdlib/pickletools.pyi index c78848464237..2f0d5f12f8a3 100644 --- a/mypy/typeshed/stdlib/pickletools.pyi +++ b/mypy/typeshed/stdlib/pickletools.pyi @@ -156,10 +156,10 @@ class OpcodeInfo: opcodes: list[OpcodeInfo] -def genops(pickle: bytes | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... -def optimize(p: bytes | IO[bytes]) -> bytes: ... +def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... +def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... def dis( - pickle: bytes | IO[bytes], + pickle: bytes | bytearray | IO[bytes], out: IO[str] | None = ..., memo: MutableMapping[int, Any] | None = ..., indentlevel: int = ..., diff --git a/mypy/typeshed/stdlib/plistlib.pyi b/mypy/typeshed/stdlib/plistlib.pyi index 9dcfcdb126cb..4ec9cbd5a31c 100644 --- a/mypy/typeshed/stdlib/plistlib.pyi +++ b/mypy/typeshed/stdlib/plistlib.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self +from _typeshed import ReadableBuffer, Self from collections.abc import Mapping, MutableMapping from datetime import datetime from enum import Enum @@ -48,7 +48,9 @@ FMT_BINARY = PlistFormat.FMT_BINARY if sys.version_info >= (3, 9): def load(fp: IO[bytes], *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... - def loads(value: bytes, *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def loads( + value: ReadableBuffer, *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ... + ) -> Any: ... else: def load( @@ -59,7 +61,7 @@ else: dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... def loads( - value: bytes, + value: ReadableBuffer, *, fmt: PlistFormat | None = ..., use_builtin_types: bool = ..., @@ -67,7 +69,7 @@ else: ) -> Any: ... def dump( - value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, fp: IO[bytes], *, fmt: PlistFormat = ..., @@ -75,7 +77,7 @@ def dump( skipkeys: bool = ..., ) -> None: ... def dumps( - value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, fmt: PlistFormat = ..., skipkeys: bool = ..., @@ -85,7 +87,7 @@ def dumps( if sys.version_info < (3, 9): def readPlist(pathOrFile: str | IO[bytes]) -> Any: ... def writePlist(value: Mapping[str, Any], pathOrFile: str | IO[bytes]) -> None: ... - def readPlistFromBytes(data: bytes) -> Any: ... + def readPlistFromBytes(data: ReadableBuffer) -> Any: ... def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... 
if sys.version_info < (3, 9): diff --git a/mypy/typeshed/stdlib/pyexpat/__init__.pyi b/mypy/typeshed/stdlib/pyexpat/__init__.pyi index 6a4ed891fe10..7e635c58c933 100644 --- a/mypy/typeshed/stdlib/pyexpat/__init__.pyi +++ b/mypy/typeshed/stdlib/pyexpat/__init__.pyi @@ -1,7 +1,6 @@ -import pyexpat.errors as errors -import pyexpat.model as model -from _typeshed import SupportsRead +from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable +from pyexpat import errors as errors, model as model from typing import Any from typing_extensions import TypeAlias, final @@ -25,7 +24,7 @@ _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] @final class XMLParserType: - def Parse(self, __data: str | bytes, __isfinal: bool = ...) -> int: ... + def Parse(self, __data: str | ReadableBuffer, __isfinal: bool = ...) -> int: ... def ParseFile(self, __file: SupportsRead[bytes]) -> int: ... def SetBase(self, __base: str) -> None: ... def GetBase(self) -> str | None: ... diff --git a/mypy/typeshed/stdlib/quopri.pyi b/mypy/typeshed/stdlib/quopri.pyi index b8dc0787fd1a..549413226bdb 100644 --- a/mypy/typeshed/stdlib/quopri.pyi +++ b/mypy/typeshed/stdlib/quopri.pyi @@ -1,8 +1,11 @@ -from typing import BinaryIO +from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite +from typing import Protocol __all__ = ["encode", "decode", "encodestring", "decodestring"] -def encode(input: BinaryIO, output: BinaryIO, quotetabs: int, header: int = ...) -> None: ... -def encodestring(s: bytes, quotetabs: int = ..., header: int = ...) -> bytes: ... -def decode(input: BinaryIO, output: BinaryIO, header: int = ...) -> None: ... -def decodestring(s: bytes, header: int = ...) -> bytes: ... +class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: int = ...) -> None: ... +def encodestring(s: ReadableBuffer, quotetabs: int = ..., header: int = ...) -> bytes: ... +def decode(input: _Input, output: SupportsWrite[bytes], header: int = ...) -> None: ... +def decodestring(s: str | ReadableBuffer, header: int = ...) -> bytes: ... diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index 2d03b60e7bb4..9fedd6f316d1 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -1,5 +1,6 @@ import sys -from _typeshed import Self +from _socket import _Address as _SourceAddress +from _typeshed import ReadableBuffer, Self, _BufferWithLen from collections.abc import Sequence from email.message import Message as _Message from re import Pattern @@ -28,8 +29,6 @@ __all__ = [ _Reply: TypeAlias = tuple[int, bytes] _SendErrs: TypeAlias = dict[str, _Reply] -# Should match source_address for socket.create_connection -_SourceAddress: TypeAlias = tuple[bytearray | bytes | str, int] SMTP_PORT: int SMTP_SSL_PORT: int @@ -102,7 +101,7 @@ class SMTP: ) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def connect(self, host: str = ..., port: int = ..., source_address: _SourceAddress | None = ...) -> _Reply: ... - def send(self, s: bytes | str) -> None: ... + def send(self, s: ReadableBuffer | str) -> None: ... def putcmd(self, cmd: str, args: str = ...) -> None: ... def getreply(self) -> _Reply: ... def docmd(self, cmd: str, args: str = ...) -> _Reply: ... @@ -114,7 +113,7 @@ class SMTP: def noop(self) -> _Reply: ... def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... 
def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ... - def data(self, msg: bytes | str) -> _Reply: ... + def data(self, msg: ReadableBuffer | str) -> _Reply: ... def verify(self, address: str) -> _Reply: ... vrfy = verify def expn(self, address: str) -> _Reply: ... @@ -125,16 +124,16 @@ class SMTP: @overload def auth_cram_md5(self, challenge: None = ...) -> None: ... @overload - def auth_cram_md5(self, challenge: bytes) -> str: ... - def auth_plain(self, challenge: bytes | None = ...) -> str: ... - def auth_login(self, challenge: bytes | None = ...) -> str: ... + def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... + def auth_plain(self, challenge: ReadableBuffer | None = ...) -> str: ... + def auth_login(self, challenge: ReadableBuffer | None = ...) -> str: ... def login(self, user: str, password: str, *, initial_response_ok: bool = ...) -> _Reply: ... def starttls(self, keyfile: str | None = ..., certfile: str | None = ..., context: SSLContext | None = ...) -> _Reply: ... def sendmail( self, from_addr: str, to_addrs: str | Sequence[str], - msg: bytes | str, + msg: _BufferWithLen | str, mail_options: Sequence[str] = ..., rcpt_options: Sequence[str] = ..., ) -> _SendErrs: ... diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index 89a6d059f165..678bdafb25f0 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -1,15 +1,8 @@ -import sys -from _typeshed import ReadableBuffer, Self, WriteableBuffer -from collections.abc import Iterable -from enum import IntEnum, IntFlag -from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper -from typing import Any, Protocol, overload -from typing_extensions import Literal - # Ideally, we'd just do "from _socket import *". Unfortunately, socket # overrides some definitions from _socket incompatibly. mypy incorrectly # prefers the definitions from _socket over those defined here. import _socket +import sys from _socket import ( _FD, EAI_AGAIN as EAI_AGAIN, @@ -119,6 +112,12 @@ from _socket import ( setdefaulttimeout as setdefaulttimeout, timeout as timeout, ) +from _typeshed import ReadableBuffer, Self, WriteableBuffer +from collections.abc import Iterable +from enum import IntEnum, IntFlag +from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper +from typing import Any, Protocol, overload +from typing_extensions import Literal if sys.platform != "darwin" or sys.version_info >= (3, 9): from _socket import ( @@ -738,7 +737,7 @@ if sys.platform != "win32": if sys.version_info >= (3, 9): # flags and address appear to be unused in send_fds and recv_fds def send_fds( - sock: socket, buffers: Iterable[bytes], fds: bytes | Iterable[int], flags: int = ..., address: None = ... + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: int = ..., address: None = ... ) -> int: ... def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = ...) -> tuple[bytes, list[int], int, Any]: ... @@ -768,16 +767,14 @@ if sys.version_info >= (3, 11): def create_connection( address: tuple[str | None, int], timeout: float | None = ..., # noqa: F811 - source_address: tuple[bytearray | bytes | str, int] | None = ..., + source_address: _Address | None = ..., *, all_errors: bool = ..., ) -> socket: ... 
else: def create_connection( - address: tuple[str | None, int], - timeout: float | None = ..., # noqa: F811 - source_address: tuple[bytearray | bytes | str, int] | None = ..., + address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = ... # noqa: F811 ) -> socket: ... if sys.version_info >= (3, 8): @@ -788,5 +785,10 @@ if sys.version_info >= (3, 8): # the 5th tuple item is an address def getaddrinfo( - host: bytes | str | None, port: str | int | None, family: int = ..., type: int = ..., proto: int = ..., flags: int = ... + host: bytes | str | None, + port: bytes | str | int | None, + family: int = ..., + type: int = ..., + proto: int = ..., + flags: int = ..., ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index 189e796de109..efda3b671ed5 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -1,6 +1,6 @@ import sqlite3 import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem +from _typeshed import Incomplete, ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from datetime import date, datetime, time from types import TracebackType @@ -227,9 +227,9 @@ else: if sys.version_info < (3, 8): class Cache: - def __init__(self, *args, **kwargs) -> None: ... - def display(self, *args, **kwargs) -> None: ... - def get(self, *args, **kwargs) -> None: ... + def __init__(self, *args: Incomplete, **kwargs: object) -> None: ... + def display(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... + def get(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... class _AggregateProtocol(Protocol): def step(self, __value: int) -> object: ... @@ -437,7 +437,7 @@ if sys.version_info >= (3, 8): else: @final class Statement: - def __init__(self, *args, **kwargs): ... + def __init__(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... _Statement: TypeAlias = Statement class Warning(Exception): ... @@ -447,7 +447,7 @@ if sys.version_info >= (3, 11): class Blob: def close(self) -> None: ... def read(self, __length: int = ...) -> bytes: ... - def write(self, __data: bytes) -> None: ... + def write(self, __data: ReadableBuffer) -> None: ... def tell(self) -> int: ... # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END def seek(self, __offset: int, __origin: int = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 6443a6ea61ba..543433f2fbd0 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -11,7 +11,7 @@ _PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] _PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] _PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] -_PasswordType: TypeAlias = Union[Callable[[], str | bytes], str, bytes] +_PasswordType: TypeAlias = Union[Callable[[], str | bytes | bytearray], str, bytes, bytearray] _SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None] @@ -61,7 +61,7 @@ def create_default_context( *, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., - cadata: str | bytes | None = ..., + cadata: str | ReadableBuffer | None = ..., ) -> SSLContext: ... 
def _create_unverified_context( protocol: int = ..., @@ -73,7 +73,7 @@ def _create_unverified_context( keyfile: StrOrBytesPath | None = ..., cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., - cadata: str | bytes | None = ..., + cadata: str | ReadableBuffer | None = ..., ) -> SSLContext: ... _create_default_https_context: Callable[..., SSLContext] @@ -82,8 +82,11 @@ def RAND_bytes(__num: int) -> bytes: ... def RAND_pseudo_bytes(__num: int) -> tuple[bytes, bool]: ... def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... -def RAND_add(__s: bytes, __entropy: float) -> None: ... -def match_hostname(cert: _PeerCertRetType, hostname: str) -> None: ... +def RAND_add(__string: str | ReadableBuffer, __entropy: float) -> None: ... + +if sys.version_info < (3, 12): + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... + def cert_time_to_seconds(cert_time: str) -> int: ... if sys.version_info >= (3, 10): @@ -94,7 +97,7 @@ if sys.version_info >= (3, 10): else: def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ...) -> str: ... -def DER_cert_to_PEM_cert(der_cert_bytes: bytes) -> str: ... +def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... class DefaultVerifyPaths(NamedTuple): @@ -290,8 +293,8 @@ class SSLSocket(socket.socket): @property def session_reused(self) -> bool | None: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def connect(self, addr: socket._Address | bytes) -> None: ... - def connect_ex(self, addr: socket._Address | bytes) -> int: ... + def connect(self, addr: socket._Address) -> None: ... + def connect_ex(self, addr: socket._Address) -> int: ... def recv(self, buflen: int = ..., flags: int = ...) -> bytes: ... def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = ..., flags: int = ...) -> int: ... def recvfrom(self, buflen: int = ..., flags: int = ...) -> tuple[bytes, socket._RetAddress]: ... @@ -301,12 +304,12 @@ class SSLSocket(socket.socket): def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... @overload - def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address) -> int: ... + def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = ...) -> int: ... @overload - def sendto(self, data: ReadableBuffer, flags_or_addr: int | socket._Address, addr: socket._Address | None = ...) -> int: ... + def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... def shutdown(self, how: int) -> None: ... def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... - def write(self, data: bytes) -> int: ... + def write(self, data: ReadableBuffer) -> int: ... def do_handshake(self, block: bool = ...) -> None: ... # block is undocumented @overload def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... @@ -362,7 +365,7 @@ class SSLContext: ) -> None: ... def load_default_certs(self, purpose: Purpose = ...) -> None: ... def load_verify_locations( - self, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., cadata: str | bytes | None = ... + self, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., cadata: str | ReadableBuffer | None = ... ) -> None: ... @overload def get_ca_certs(self, binary_form: Literal[False] = ...) 
-> list[_PeerCertRetDictType]: ... @@ -408,7 +411,7 @@ class SSLObject: def session_reused(self) -> bool: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... - def write(self, data: bytes) -> int: ... + def write(self, data: ReadableBuffer) -> int: ... @overload def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... @overload @@ -433,16 +436,21 @@ class MemoryBIO: pending: int eof: bool def read(self, __size: int = ...) -> bytes: ... - def write(self, __buf: bytes) -> int: ... + def write(self, __buf: ReadableBuffer) -> int: ... def write_eof(self) -> None: ... @final class SSLSession: - id: bytes - time: int - timeout: int - ticket_lifetime_hint: int - has_ticket: bool + @property + def has_ticket(self) -> bool: ... + @property + def id(self) -> bytes: ... + @property + def ticket_lifetime_hint(self) -> int: ... + @property + def time(self) -> int: ... + @property + def timeout(self) -> int: ... class SSLErrorNumber(enum.IntEnum): SSL_ERROR_EOF: int diff --git a/mypy/typeshed/stdlib/struct.pyi b/mypy/typeshed/stdlib/struct.pyi index 74afddd74262..02097384e0f7 100644 --- a/mypy/typeshed/stdlib/struct.pyi +++ b/mypy/typeshed/stdlib/struct.pyi @@ -6,8 +6,8 @@ __all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpac class error(Exception): ... -def pack(fmt: str | bytes, *v: Any) -> bytes: ... -def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... +def pack(__fmt: str | bytes, *v: Any) -> bytes: ... +def pack_into(__fmt: str | bytes, __buffer: WriteableBuffer, __offset: int, *v: Any) -> None: ... def unpack(__format: str | bytes, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... def iter_unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index 8855e1a953db..5ad5af7f20bd 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -346,7 +346,7 @@ class TarInfo: pax_headers: Mapping[str, str] def __init__(self, name: str = ...) -> None: ... @classmethod - def frombuf(cls: Type[Self], buf: bytes, encoding: str, errors: str) -> Self: ... + def frombuf(cls: Type[Self], buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... @classmethod def fromtarfile(cls: Type[Self], tarfile: TarFile) -> Self: ... @property diff --git a/mypy/typeshed/stdlib/termios.pyi b/mypy/typeshed/stdlib/termios.pyi index 494162a49b38..bf8d7bee2473 100644 --- a/mypy/typeshed/stdlib/termios.pyi +++ b/mypy/typeshed/stdlib/termios.pyi @@ -4,9 +4,9 @@ from typing import Any from typing_extensions import TypeAlias if sys.platform != "win32": + # Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints. 
_Attr: TypeAlias = list[int | list[bytes | int]] - # TODO constants not really documented B0: int B1000000: int B110: int @@ -44,17 +44,22 @@ if sys.platform != "win32": BSDLY: int CBAUD: int CBAUDEX: int + CDEL: int CDSUSP: int CEOF: int CEOL: int + CEOL2: int CEOT: int CERASE: int + CESC: int CFLUSH: int CIBAUD: int CINTR: int CKILL: int CLNEXT: int CLOCAL: int + CNUL: int + COMMON: int CQUIT: int CR0: int CR1: int @@ -73,6 +78,7 @@ if sys.platform != "win32": CSTOP: int CSTOPB: int CSUSP: int + CSWTCH: int CWERASE: int ECHO: int ECHOCTL: int @@ -93,6 +99,7 @@ if sys.platform != "win32": FIONREAD: int FLUSHO: int HUPCL: int + IBSHIFT: int ICANON: int ICRNL: int IEXTEN: int @@ -100,6 +107,7 @@ if sys.platform != "win32": IGNCR: int IGNPAR: int IMAXBEL: int + INIT_C_CC: int INLCR: int INPCK: int IOCSIZE_MASK: int @@ -110,17 +118,18 @@ if sys.platform != "win32": IXANY: int IXOFF: int IXON: int + N_MOUSE: int + N_PPP: int + N_SLIP: int + N_STRIP: int + N_TTY: int NCC: int NCCS: int NL0: int NL1: int NLDLY: int NOFLSH: int - N_MOUSE: int - N_PPP: int - N_SLIP: int - N_STRIP: int - N_TTY: int + NSWTCH: int OCRNL: int OFDEL: int OFILL: int @@ -151,6 +160,7 @@ if sys.platform != "win32": TCSADRAIN: int TCSAFLUSH: int TCSANOW: int + TCSASOFT: int TCSBRK: int TCSBRKP: int TCSETA: int @@ -167,15 +177,11 @@ if sys.platform != "win32": TIOCGLCKTRMIOS: int TIOCGPGRP: int TIOCGSERIAL: int + TIOCGSIZE: int TIOCGSOFTCAR: int TIOCGWINSZ: int TIOCINQ: int TIOCLINUX: int - TIOCMBIC: int - TIOCMBIS: int - TIOCMGET: int - TIOCMIWAIT: int - TIOCMSET: int TIOCM_CAR: int TIOCM_CD: int TIOCM_CTS: int @@ -187,10 +193,14 @@ if sys.platform != "win32": TIOCM_RTS: int TIOCM_SR: int TIOCM_ST: int + TIOCMBIC: int + TIOCMBIS: int + TIOCMGET: int + TIOCMIWAIT: int + TIOCMSET: int TIOCNOTTY: int TIOCNXCL: int TIOCOUTQ: int - TIOCPKT: int TIOCPKT_DATA: int TIOCPKT_DOSTOP: int TIOCPKT_FLUSHREAD: int @@ -198,7 +208,9 @@ if sys.platform != "win32": TIOCPKT_NOSTOP: int TIOCPKT_START: int TIOCPKT_STOP: int + TIOCPKT: int TIOCSCTTY: int + TIOCSER_TEMT: int TIOCSERCONFIG: int TIOCSERGETLSR: int TIOCSERGETMULTI: int @@ -206,14 +218,15 @@ if sys.platform != "win32": TIOCSERGWILD: int TIOCSERSETMULTI: int TIOCSERSWILD: int - TIOCSER_TEMT: int TIOCSETD: int TIOCSLCKTRMIOS: int TIOCSPGRP: int TIOCSSERIAL: int + TIOCSSIZE: int TIOCSSOFTCAR: int TIOCSTI: int TIOCSWINSZ: int + TIOCTTYGSTRUCT: int TOSTOP: int VDISCARD: int VEOF: int @@ -238,7 +251,8 @@ if sys.platform != "win32": VWERASE: int XCASE: int XTABS: int - def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... + + def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... # Returns _Attr; we use Any to avoid a union in the return type def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ... def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ... def tcdrain(__fd: FileDescriptorLike) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/commondialog.pyi b/mypy/typeshed/stdlib/tkinter/commondialog.pyi index 49101c7e6089..edae62582237 100644 --- a/mypy/typeshed/stdlib/tkinter/commondialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/commondialog.pyi @@ -10,5 +10,5 @@ class Dialog: command: ClassVar[str | None] master: Incomplete | None options: Mapping[str, Incomplete] - def __init__(self, master: Incomplete | None = ..., **options) -> None: ... - def show(self, **options): ... + def __init__(self, master: Incomplete | None = ..., **options: Incomplete) -> None: ... + def show(self, **options: Incomplete) -> Incomplete: ... 
diff --git a/mypy/typeshed/stdlib/tkinter/dialog.pyi b/mypy/typeshed/stdlib/tkinter/dialog.pyi index ef7713f40994..032dac2c15a2 100644 --- a/mypy/typeshed/stdlib/tkinter/dialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/dialog.pyi @@ -12,5 +12,5 @@ DIALOG_ICON: str class Dialog(Widget): widgetName: str num: int - def __init__(self, master: Incomplete | None = ..., cnf: Mapping[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: Incomplete | None = ..., cnf: Mapping[str, Any] = ..., **kw: Incomplete) -> None: ... def destroy(self) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/dnd.pyi b/mypy/typeshed/stdlib/tkinter/dnd.pyi index e2cfc43f606a..ad7972968f81 100644 --- a/mypy/typeshed/stdlib/tkinter/dnd.pyi +++ b/mypy/typeshed/stdlib/tkinter/dnd.pyi @@ -16,4 +16,4 @@ class DndHandler: def on_motion(self, event: Event[Misc]) -> None: ... def on_release(self, event: Event[Misc]) -> None: ... -def dnd_start(source, event) -> DndHandler | None: ... +def dnd_start(source: _DndSource, event: Event[Misc]) -> DndHandler | None: ... diff --git a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi index 72f6ca8c0687..4d8a7004c6b9 100644 --- a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi +++ b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete from tkinter import Frame, Misc, Scrollbar, Text __all__ = ["ScrolledText"] @@ -6,4 +7,4 @@ __all__ = ["ScrolledText"] class ScrolledText(Text): frame: Frame vbar: Scrollbar - def __init__(self, master: Misc | None = ..., **kwargs) -> None: ... + def __init__(self, master: Misc | None = ..., **kwargs: Incomplete) -> None: ... diff --git a/mypy/typeshed/stdlib/tokenize.pyi b/mypy/typeshed/stdlib/tokenize.pyi index 6f242a6cd1ef..7c00b507a528 100644 --- a/mypy/typeshed/stdlib/tokenize.pyi +++ b/mypy/typeshed/stdlib/tokenize.pyi @@ -122,8 +122,8 @@ class Untokenizer: # the docstring says "returns bytes" but is incorrect -- # if the ENCODING token is missing, it skips the encode def untokenize(iterable: Iterable[_Token]) -> Any: ... -def detect_encoding(readline: Callable[[], bytes]) -> tuple[str, Sequence[bytes]]: ... -def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ... +def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... +def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented def open(filename: StrOrBytesPath | int) -> TextIO: ... def group(*choices: str) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 16fe096d3117..4047cf84593d 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -585,13 +585,15 @@ if sys.version_info >= (3, 9): @property def __parameters__(self) -> tuple[Any, ...]: ... def __init__(self, origin: type, args: Any) -> None: ... + def __getitem__(self, __typeargs: Any) -> GenericAlias: ... if sys.version_info >= (3, 11): @property def __unpacked__(self) -> bool: ... @property def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... - def __getattr__(self, name: str) -> Any: ... # incomplete + # GenericAlias delegates attr access to `__origin__` + def __getattr__(self, name: str) -> Any: ... 
if sys.version_info >= (3, 10): @final diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 954f47d14502..cc27ae7dbda2 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -325,7 +325,7 @@ class SupportsRound(Protocol[_T_co]): def __round__(self, __ndigits: int) -> _T_co: ... @runtime_checkable -class Sized(Protocol, metaclass=ABCMeta): +class Sized(Protocol): @abstractmethod def __len__(self) -> int: ... @@ -452,10 +452,7 @@ class Container(Protocol[_T_co]): def __contains__(self, __x: object) -> bool: ... @runtime_checkable -class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): - # Implement Sized (but don't have it as a base class). - @abstractmethod - def __len__(self) -> int: ... +class Collection(Sized, Iterable[_T_co], Container[_T_co], Protocol[_T_co]): ... class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]): @overload @@ -566,7 +563,7 @@ class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): def __xor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... def __rxor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... -class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): +class ValuesView(MappingView, Collection[_VT_co], Generic[_VT_co]): def __init__(self, mapping: Mapping[Any, _VT_co]) -> None: ... # undocumented def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_VT_co]: ... @@ -621,6 +618,8 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): # -- os._Environ.__ior__ # -- collections.UserDict.__ior__ # -- collections.ChainMap.__ior__ + # -- peewee.attrdict.__add__ + # -- peewee.attrdict.__iadd__ # -- weakref.WeakValueDictionary.__ior__ # -- weakref.WeakKeyDictionary.__ior__ @overload @@ -638,7 +637,9 @@ TYPE_CHECKING: bool # This differs from runtime, but better reflects the fact that in reality # classes deriving from IO use different names for the arguments. class IO(Iterator[AnyStr], Generic[AnyStr]): - # TODO use abstract properties + # At runtime these are all abstract properties, + # but making them abstract in the stub is hugely disruptive, for not much gain. + # See #8726 @property def mode(self) -> str: ... @property @@ -691,7 +692,7 @@ class BinaryIO(IO[bytes]): def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): - # TODO use abstractproperty + # See comment regarding the @properties in the `IO` class @property def buffer(self) -> BinaryIO: ... @property diff --git a/mypy/typeshed/stdlib/unicodedata.pyi b/mypy/typeshed/stdlib/unicodedata.pyi index 7337ab8789b2..5a9aa0a3395f 100644 --- a/mypy/typeshed/stdlib/unicodedata.pyi +++ b/mypy/typeshed/stdlib/unicodedata.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import ReadOnlyBuffer from typing import Any, TypeVar from typing_extensions import final @@ -21,7 +22,7 @@ def east_asian_width(__chr: str) -> str: ... if sys.version_info >= (3, 8): def is_normalized(__form: str, __unistr: str) -> bool: ... -def lookup(__name: str | bytes) -> str: ... +def lookup(__name: str | ReadOnlyBuffer) -> str: ... def mirrored(__chr: str) -> int: ... def name(__chr: str, __default: _T = ...) -> str | _T: ... def normalize(__form: str, __unistr: str) -> str: ... @@ -41,7 +42,7 @@ class UCD: if sys.version_info >= (3, 8): def is_normalized(self, __form: str, __unistr: str) -> bool: ... - def lookup(self, __name: str | bytes) -> str: ... + def lookup(self, __name: str | ReadOnlyBuffer) -> str: ... def mirrored(self, __chr: str) -> int: ... 
def name(self, __chr: str, __default: _T = ...) -> str | _T: ... def normalize(self, __form: str, __unistr: str) -> str: ... diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 200f8dbaea23..c75539a97368 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -157,18 +157,14 @@ class TestCase: def assertRaisesRegex( # type: ignore[misc] self, expected_exception: type[BaseException] | tuple[type[BaseException], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], + expected_regex: str | Pattern[str], callable: Callable[..., Any], *args: Any, **kwargs: Any, ) -> None: ... @overload def assertRaisesRegex( - self, - expected_exception: type[_E] | tuple[type[_E], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - *, - msg: Any = ..., + self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertRaisesContext[_E]: ... @overload def assertWarns( # type: ignore[misc] @@ -186,18 +182,14 @@ class TestCase: def assertWarnsRegex( # type: ignore[misc] self, expected_warning: type[Warning] | tuple[type[Warning], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], + expected_regex: str | Pattern[str], callable: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs, ) -> None: ... @overload def assertWarnsRegex( - self, - expected_warning: type[Warning] | tuple[type[Warning], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - *, - msg: Any = ..., + self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertWarnsContext: ... def assertLogs( self, logger: str | logging.Logger | None = ..., level: int | str | None = ... diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 133380fce334..6c58f38a0d82 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -300,8 +300,8 @@ class _patcher: **kwargs: Any, ) -> _patch[_Mock]: ... @overload + @staticmethod def object( # type: ignore[misc] - self, target: Any, attribute: str, new: _T, @@ -313,8 +313,8 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... @overload + @staticmethod def object( - self, target: Any, attribute: str, *, @@ -325,8 +325,8 @@ class _patcher: new_callable: Any | None = ..., **kwargs: Any, ) -> _patch[_Mock]: ... + @staticmethod def multiple( - self, target: Any, spec: Any | None = ..., create: bool = ..., @@ -335,7 +335,8 @@ class _patcher: new_callable: Any | None = ..., **kwargs: Any, ) -> _patch[Any]: ... - def stopall(self) -> None: ... + @staticmethod + def stopall() -> None: ... patch: _patcher diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index 207a05e75a57..02e2774b3b8e 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -1,6 +1,6 @@ import sys -from collections.abc import Callable, Mapping, Sequence -from typing import Any, AnyStr, Generic, NamedTuple, overload +from collections.abc import Callable, Iterable, Mapping, Sequence +from typing import Any, AnyStr, Generic, NamedTuple, TypeVar, overload if sys.version_info >= (3, 9): from types import GenericAlias @@ -132,14 +132,14 @@ def parse_qsl( separator: str = ..., ) -> list[tuple[AnyStr, AnyStr]]: ... 
@overload -def quote(string: str, safe: str | bytes = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote(string: str, safe: str | Iterable[int] = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... @overload -def quote(string: bytes, safe: str | bytes = ...) -> str: ... -def quote_from_bytes(bs: bytes, safe: str | bytes = ...) -> str: ... +def quote(string: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... @overload -def quote_plus(string: str, safe: str | bytes = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote_plus(string: str, safe: str | Iterable[int] = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... @overload -def quote_plus(string: bytes, safe: str | bytes = ...) -> str: ... +def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... if sys.version_info >= (3, 9): def unquote(string: str | bytes, encoding: str = ..., errors: str = ...) -> str: ... @@ -152,24 +152,27 @@ def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: .. @overload def urldefrag(url: str) -> DefragResult: ... @overload -def urldefrag(url: bytes | None) -> DefragResultBytes: ... +def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... + +_Q = TypeVar("_Q", bound=str | Iterable[int]) + def urlencode( query: Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]], doseq: bool = ..., - safe: str | bytes = ..., + safe: _Q = ..., encoding: str = ..., errors: str = ..., - quote_via: Callable[[AnyStr, str | bytes, str, str], str] = ..., + quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., ) -> str: ... def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = ...) -> AnyStr: ... @overload def urlparse(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> ParseResult: ... @overload -def urlparse(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> ParseResultBytes: ... +def urlparse(url: bytes | bytearray | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> ParseResultBytes: ... @overload def urlsplit(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> SplitResult: ... @overload -def urlsplit(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... +def urlsplit(url: bytes | bytearray | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... @overload def urlunparse( components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] diff --git a/mypy/typeshed/stdlib/urllib/response.pyi b/mypy/typeshed/stdlib/urllib/response.pyi index 8c9a600f3c48..ca9781dbfbb4 100644 --- a/mypy/typeshed/stdlib/urllib/response.pyi +++ b/mypy/typeshed/stdlib/urllib/response.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self +from _typeshed import ReadableBuffer, Self from collections.abc import Callable, Iterable from email.message import Message from types import TracebackType @@ -33,8 +33,8 @@ class addbase(BinaryIO): def tell(self) -> int: ... def truncate(self, size: int | None = ...) -> int: ... def writable(self) -> bool: ... - def write(self, s: bytes) -> int: ... - def writelines(self, lines: Iterable[bytes]) -> None: ... + def write(self, s: ReadableBuffer) -> int: ... 
+ def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... class addclosehook(addbase): closehook: Callable[..., object] diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index af960391e85d..9a619235e689 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -1,10 +1,5 @@ import sys from _typeshed import Self, SupportsKeysAndGetItem -from _weakrefset import WeakSet as WeakSet -from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping -from typing import Any, Generic, TypeVar, overload -from typing_extensions import ParamSpec - from _weakref import ( CallableProxyType as CallableProxyType, ProxyType as ProxyType, @@ -14,6 +9,10 @@ from _weakref import ( proxy as proxy, ref as ref, ) +from _weakrefset import WeakSet as WeakSet +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping +from typing import Any, Generic, TypeVar, overload +from typing_extensions import ParamSpec __all__ = [ "ref", diff --git a/mypy/typeshed/stdlib/winsound.pyi b/mypy/typeshed/stdlib/winsound.pyi index 588bd5969e98..fd5a552cf9c1 100644 --- a/mypy/typeshed/stdlib/winsound.pyi +++ b/mypy/typeshed/stdlib/winsound.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import ReadableBuffer from typing import overload from typing_extensions import Literal @@ -21,7 +22,7 @@ if sys.platform == "win32": def Beep(frequency: int, duration: int) -> None: ... # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload - def PlaySound(sound: bytes | None, flags: Literal[4]) -> None: ... + def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... @overload - def PlaySound(sound: str | bytes | None, flags: int) -> None: ... + def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... def MessageBeep(type: int = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/xml/__init__.pyi b/mypy/typeshed/stdlib/xml/__init__.pyi index c524ac2b1cfc..a487d2467f41 100644 --- a/mypy/typeshed/stdlib/xml/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/__init__.pyi @@ -1 +1 @@ -import xml.parsers as parsers +from xml import parsers as parsers diff --git a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi index 3ca885dbbaa0..e460d6b21afa 100644 --- a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi +++ b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, ReadableBuffer, SupportsRead from typing import Any, NoReturn from xml.dom.minidom import Document, DOMImplementation, Node, TypeInfo from xml.dom.xmlbuilder import DOMBuilderFilter, Options @@ -30,8 +30,8 @@ class ExpatBuilder: def getParser(self): ... def reset(self) -> None: ... def install(self, parser) -> None: ... - def parseFile(self, file) -> Document: ... - def parseString(self, string: str) -> Document: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... + def parseString(self, string: str | ReadableBuffer) -> Document: ... def start_doctype_decl_handler(self, doctypeName, systemId, publicId, has_internal_subset) -> None: ... def end_doctype_decl_handler(self) -> None: ... def pi_handler(self, target, data) -> None: ... @@ -87,14 +87,14 @@ class ParseEscape(Exception): ... class InternalSubsetExtractor(ExpatBuilder): subset: Any | None def getSubset(self) -> Any | None: ... - def parseFile(self, file) -> None: ... 
# type: ignore[override] - def parseString(self, string: str) -> None: ... # type: ignore[override] + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] + def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] def start_doctype_decl_handler(self, name, publicId, systemId, has_internal_subset) -> None: ... # type: ignore[override] def end_doctype_decl_handler(self) -> NoReturn: ... def start_element_handler(self, name, attrs) -> NoReturn: ... -def parse(file, namespaces: bool = ...): ... -def parseString(string: str, namespaces: bool = ...): ... +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = ...): ... +def parseString(string: str | ReadableBuffer, namespaces: bool = ...): ... def parseFragment(file, context, namespaces: bool = ...): ... def parseFragmentString(string: str, context, namespaces: bool = ...): ... def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/mypy/typeshed/stdlib/xml/dom/minidom.pyi b/mypy/typeshed/stdlib/xml/dom/minidom.pyi index 04086fdc81b1..5997e031fd73 100644 --- a/mypy/typeshed/stdlib/xml/dom/minidom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/minidom.pyi @@ -1,12 +1,12 @@ import sys import xml.dom -from _typeshed import Incomplete, Self, SupportsRead, SupportsWrite +from _typeshed import Incomplete, ReadableBuffer, Self, SupportsRead, SupportsWrite from typing_extensions import Literal from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS from xml.sax.xmlreader import XMLReader -def parse(file: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = ..., bufsize: int | None = ...): ... -def parseString(string: str | bytes, parser: XMLReader | None = ...): ... +def parse(file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = ..., bufsize: int | None = ...): ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = ...): ... def getDOMImplementation(features=...) -> DOMImplementation | None: ... class Node(xml.dom.Node): @@ -213,7 +213,7 @@ class CDATASection(Text): class ReadOnlySequentialNamedNodeMap: def __init__(self, seq=...) -> None: ... - def __len__(self): ... + def __len__(self) -> int: ... def getNamedItem(self, name): ... def getNamedItemNS(self, namespaceURI: str, localName): ... def __getitem__(self, name_or_tuple): ... diff --git a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi index 7bb78d0628ce..3e3e3f266206 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import StrOrBytesPath from collections.abc import Callable from xml.etree.ElementTree import Element @@ -11,7 +12,7 @@ if sys.version_info >= (3, 9): class FatalIncludeError(SyntaxError): ... -def default_loader(href: str | bytes | int, parse: str, encoding: str | None = ...) -> str | Element: ... +def default_loader(href: StrOrBytesPath | int, parse: str, encoding: str | None = ...) -> str | Element: ... # TODO: loader is of type default_loader ie it takes a callable that has the # same signature as default_loader. 
But default_loader has a keyword argument diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi index 84059bc21a87..c063c1fd3488 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -1,6 +1,6 @@ import sys from _collections_abc import dict_keys -from _typeshed import FileDescriptor, StrOrBytesPath, SupportsRead, SupportsWrite +from _typeshed import FileDescriptor, ReadableBuffer, StrOrBytesPath, SupportsRead, SupportsWrite from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence from typing import Any, TypeVar, overload from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard @@ -54,7 +54,7 @@ def iselement(element: object) -> TypeGuard[Element]: ... if sys.version_info >= (3, 8): @overload def canonicalize( - xml_data: str | bytes | None = ..., + xml_data: str | ReadableBuffer | None = ..., *, out: None = ..., from_file: _FileRead | None = ..., @@ -68,7 +68,7 @@ if sys.version_info >= (3, 8): ) -> str: ... @overload def canonicalize( - xml_data: str | bytes | None = ..., + xml_data: str | ReadableBuffer | None = ..., *, out: SupportsWrite[str], from_file: _FileRead | None = ..., @@ -270,19 +270,19 @@ def iterparse( class XMLPullParser: def __init__(self, events: Sequence[str] | None = ..., *, _parser: XMLParser | None = ...) -> None: ... - def feed(self, data: str | bytes) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... # Second element in the tuple could be `Element`, `tuple[str, str]` or `None`. # Use `Any` to avoid false-positive errors. def read_events(self) -> Iterator[tuple[str, Any]]: ... -def XML(text: str | bytes, parser: XMLParser | None = ...) -> Element: ... -def XMLID(text: str | bytes, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... +def XML(text: str | ReadableBuffer, parser: XMLParser | None = ...) -> Element: ... +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... # This is aliased to XML in the source. fromstring = XML -def fromstringlist(sequence: Sequence[str | bytes], parser: XMLParser | None = ...) -> Element: ... +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = ...) -> Element: ... # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce @@ -313,9 +313,11 @@ class TreeBuilder: def __init__(self, element_factory: _ElementFactory | None = ...) -> None: ... def close(self) -> Element: ... - def data(self, __data: str | bytes) -> None: ... - def start(self, __tag: str | bytes, __attrs: dict[str | bytes, str | bytes]) -> Element: ... - def end(self, __tag: str | bytes) -> Element: ... + def data(self, __data: str) -> None: ... + # tag and attrs are passed to the element_factory, so they could be anything + # depending on what the particular factory supports. + def start(self, __tag: Any, __attrs: dict[Any, Any]) -> Element: ... + def end(self, __tag: str) -> Element: ... if sys.version_info >= (3, 8): # These two methods have pos-only parameters in the C implementation def comment(self, __text: str | None) -> Element: ... @@ -355,4 +357,4 @@ class XMLParser: def doctype(self, __name: str, __pubid: str, __system: str) -> None: ... def close(self) -> Any: ... - def feed(self, __data: str | bytes) -> None: ... 
+ def feed(self, __data: str | ReadableBuffer) -> None: ... diff --git a/mypy/typeshed/stdlib/xml/parsers/__init__.pyi b/mypy/typeshed/stdlib/xml/parsers/__init__.pyi index cac086235cba..cebdb6a30014 100644 --- a/mypy/typeshed/stdlib/xml/parsers/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/parsers/__init__.pyi @@ -1 +1 @@ -import xml.parsers.expat as expat +from xml.parsers import expat as expat diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi index af4ee052480f..b8ab4d439e74 100644 --- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import SupportsRead, _T_co +from _typeshed import ReadableBuffer, SupportsRead, _T_co from collections.abc import Iterable from typing import Any, NoReturn, Protocol from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler @@ -15,7 +15,7 @@ class SAXException(Exception): def __getitem__(self, ix: Any) -> NoReturn: ... class SAXParseException(SAXException): - def __init__(self, msg: str, exception: Exception, locator: Locator) -> None: ... + def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... def getColumnNumber(self) -> int: ... def getLineNumber(self) -> int: ... def getPublicId(self): ... @@ -36,5 +36,5 @@ else: def parse( source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], handler: ContentHandler, errorHandler: ErrorHandler = ... ) -> None: ... -def parseString(string: bytes | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... +def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... diff --git a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi index 517c17072b87..4480f4098635 100644 --- a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -53,7 +53,7 @@ class AttributesImpl: def getQNameByName(self, name): ... def getNames(self): ... def getQNames(self): ... - def __len__(self): ... + def __len__(self) -> int: ... def __getitem__(self, name): ... def keys(self): ... def __contains__(self, name): ... diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index da1710787252..e964cd6eda87 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -1,6 +1,6 @@ import io import sys -from _typeshed import Self, StrOrBytesPath, StrPath +from _typeshed import Self, StrOrBytesPath, StrPath, _BufferWithLen from collections.abc import Callable, Iterable, Iterator from os import PathLike from types import TracebackType @@ -103,7 +103,7 @@ class ZipFile: compression: int # undocumented compresslevel: int | None # undocumented mode: _ZipFileMode # undocumented - pwd: str | None # undocumented + pwd: bytes | None # undocumented if sys.version_info >= (3, 11): @overload def __init__( @@ -173,7 +173,11 @@ class ZipFile: self, filename: StrPath, arcname: StrPath | None = ..., compress_type: int | None = ..., compresslevel: int | None = ... ) -> None: ... def writestr( - self, zinfo_or_arcname: str | ZipInfo, data: bytes | str, compress_type: int | None = ..., compresslevel: int | None = ... + self, + zinfo_or_arcname: str | ZipInfo, + data: _BufferWithLen | str, + compress_type: int | None = ..., + compresslevel: int | None = ..., ) -> None: ... 
if sys.version_info >= (3, 11): def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/zipimport.pyi b/mypy/typeshed/stdlib/zipimport.pyi index db06544138ca..d3017f385c0c 100644 --- a/mypy/typeshed/stdlib/zipimport.pyi +++ b/mypy/typeshed/stdlib/zipimport.pyi @@ -1,9 +1,8 @@ -import os import sys +from _typeshed import StrOrBytesPath from importlib.abc import ResourceReader from importlib.machinery import ModuleSpec from types import CodeType, ModuleType -from typing import Any if sys.version_info >= (3, 8): __all__ = ["ZipImportError", "zipimporter"] @@ -13,7 +12,11 @@ class ZipImportError(ImportError): ... class zipimporter: archive: str prefix: str - def __init__(self, path: str | bytes | os.PathLike[Any]) -> None: ... + if sys.version_info >= (3, 11): + def __init__(self, path: str) -> None: ... + else: + def __init__(self, path: StrOrBytesPath) -> None: ... + def find_loader(self, fullname: str, path: str | None = ...) -> tuple[zipimporter | None, list[str]]: ... # undocumented def find_module(self, fullname: str, path: str | None = ...) -> zipimporter | None: ... def get_code(self, fullname: str) -> CodeType: ... diff --git a/mypy/typeshed/stdlib/zlib.pyi b/mypy/typeshed/stdlib/zlib.pyi index cfd6784bb771..ea41567eefc5 100644 --- a/mypy/typeshed/stdlib/zlib.pyi +++ b/mypy/typeshed/stdlib/zlib.pyi @@ -1,6 +1,5 @@ import sys -from array import array -from typing import Any +from _typeshed import ReadableBuffer from typing_extensions import Literal DEFLATED: Literal[8] @@ -29,7 +28,7 @@ Z_TREES: Literal[6] class error(Exception): ... class _Compress: - def compress(self, data: bytes) -> bytes: ... + def compress(self, data: ReadableBuffer) -> bytes: ... def flush(self, mode: int = ...) -> bytes: ... def copy(self) -> _Compress: ... @@ -37,21 +36,26 @@ class _Decompress: unused_data: bytes unconsumed_tail: bytes eof: bool - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... def flush(self, length: int = ...) -> bytes: ... def copy(self) -> _Decompress: ... -def adler32(__data: bytes, __value: int = ...) -> int: ... +def adler32(__data: ReadableBuffer, __value: int = ...) -> int: ... if sys.version_info >= (3, 11): - def compress(__data: bytes, level: int = ..., wbits: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = ..., wbits: int = ...) -> bytes: ... else: - def compress(__data: bytes, level: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = ...) -> bytes: ... def compressobj( - level: int = ..., method: int = ..., wbits: int = ..., memLevel: int = ..., strategy: int = ..., zdict: bytes | None = ... + level: int = ..., + method: int = ..., + wbits: int = ..., + memLevel: int = ..., + strategy: int = ..., + zdict: ReadableBuffer | None = ..., ) -> _Compress: ... -def crc32(__data: array[Any] | bytes, __value: int = ...) -> int: ... -def decompress(__data: bytes, wbits: int = ..., bufsize: int = ...) -> bytes: ... -def decompressobj(wbits: int = ..., zdict: bytes = ...) -> _Decompress: ... +def crc32(__data: ReadableBuffer, __value: int = ...) -> int: ... +def decompress(__data: ReadableBuffer, wbits: int = ..., bufsize: int = ...) -> bytes: ... +def decompressobj(wbits: int = ..., zdict: ReadableBuffer = ...) -> _Decompress: ... 
diff --git a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi index 7f22c07b32c0..8b9ba9e7023a 100644 --- a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi @@ -1,6 +1,6 @@ from _typeshed import Self, StrPath from collections.abc import Iterable, Sequence -from datetime import tzinfo +from datetime import datetime, timedelta, tzinfo from typing import Any, Protocol __all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] @@ -19,6 +19,9 @@ class ZoneInfo(tzinfo): def from_file(cls: type[Self], __fobj: _IOBytes, key: str | None = ...) -> Self: ... @classmethod def clear_cache(cls, *, only_keys: Iterable[str] | None = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str | None: ... + def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + def dst(self, __dt: datetime | None) -> timedelta | None: ... # Note: Both here and in clear_cache, the types allow the use of `str` where # a sequence of strings is required. This should be remedied if a solution From 0062994228fb62975c6cef4d2c80d00c7aa1c545 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 18 Nov 2022 01:08:36 -0800 Subject: [PATCH 056/292] Revert typeshed ctypes change (#14128) Since the plugin provides superior type checking: https://github.com/python/mypy/pull/13987#issuecomment-1310863427 --- mypy/typeshed/stdlib/ctypes/__init__.pyi | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 1851d3481ee2..84e4ba07a02a 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -266,11 +266,7 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - # Note: only available if _CT == c_char - @property - def raw(self) -> bytes: ... - @raw.setter - def raw(self, value: ReadableBuffer) -> None: ... + raw: bytes # Note: only available if _CT == c_char value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. 
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT From b650d9633d508005fc030e5e80097d74c3d24d0d Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 18 Nov 2022 01:49:39 -0800 Subject: [PATCH 057/292] Revert use of recursive alias in typeshed (#14130) Easy crash to repro: `mypy --any-exprs-report=out -c 'pass' --show-traceback` --- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 3 ++- mypy/typeshed/stdlib/builtins.pyi | 14 ++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index 1b54284fe727..3ae2fca1d19d 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -276,4 +276,5 @@ StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] # Objects suitable to be passed to sys.settrace, threading.settrace, and similar -TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] +# TODO: Ideally this would be a recursive type alias +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], Callable[[FrameType, str, Any], Any] | None] diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 00eac9e49cf0..5482955eb0ab 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1232,13 +1232,19 @@ def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: @overload def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... +# We need recursive types to express the type of the second argument to `isinstance` properly, hence the use of `Any` if sys.version_info >= (3, 10): - _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] + def isinstance( + __obj: object, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] + ) -> bool: ... + def issubclass( + __cls: type, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] + ) -> bool: ... + else: - _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] + def isinstance(__obj: object, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... + def issubclass(__cls: type, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... -def isinstance(__obj: object, __class_or_tuple: _ClassInfo) -> bool: ... -def issubclass(__cls: type, __class_or_tuple: _ClassInfo) -> bool: ... def len(__obj: Sized) -> int: ... def license() -> None: ... def locals() -> dict[str, Any]: ... From 1d6a5b1d2abf617b149e8bf8ff435f64dc507fd3 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 18 Nov 2022 10:48:27 +0000 Subject: [PATCH 058/292] Fix daemon crash on malformed NamedTuple (#14119) Fixes #14098 Having invalid statements in a NamedTuple is almost like a syntax error, we can remove them after giving an error (without further analysis). This PR does almost exactly the same as https://github.com/python/mypy/pull/13963 did for TypedDicts. 
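
For illustration, the shape of input this handles is roughly the one exercised by the new fine-grained tests below: a class-level statement inside a NamedTuple body that is not a `field: type` annotation. Mypy reports the error once, drops the statement from the body, and records it in `removed_statements` so fine-grained mode can put it back before re-analysis instead of crashing:

```python
from typing import NamedTuple

class NT(NamedTuple):
    class C:  # error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
        ...
    x: int
    y: int
```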
Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/nodes.py | 4 ++ mypy/semanal_namedtuple.py | 19 ++++-- mypy/semanal_typeddict.py | 2 + mypy/server/aststrip.py | 2 + test-data/unit/check-class-namedtuple.test | 2 - test-data/unit/fine-grained.test | 72 ++++++++++++++++++++++ 6 files changed, 94 insertions(+), 7 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index cf711c45f587..ebf2f5cb271a 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1068,6 +1068,7 @@ class ClassDef(Statement): "analyzed", "has_incompatible_baseclass", "deco_line", + "removed_statements", ) __match_args__ = ("name", "defs") @@ -1086,6 +1087,8 @@ class ClassDef(Statement): keywords: dict[str, Expression] analyzed: Expression | None has_incompatible_baseclass: bool + # Used by special forms like NamedTuple and TypedDict to store invalid statements + removed_statements: list[Statement] def __init__( self, @@ -1111,6 +1114,7 @@ def __init__( self.has_incompatible_baseclass = False # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: int | None = None + self.removed_statements = [] def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_class_def(self) diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 04308db99e63..ec5f13d0fce0 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -32,6 +32,7 @@ NameExpr, PassStmt, RefExpr, + Statement, StrExpr, SymbolTable, SymbolTableNode, @@ -111,7 +112,7 @@ def analyze_namedtuple_classdef( if result is None: # This is a valid named tuple, but some types are incomplete. return True, None - items, types, default_items = result + items, types, default_items, statements = result if is_func_scope and "@" not in defn.name: defn.name += "@" + str(defn.line) existing_info = None @@ -123,6 +124,7 @@ def analyze_namedtuple_classdef( defn.analyzed = NamedTupleExpr(info, is_typed=True) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = statements # All done: this is a valid named tuple with all types known. return True, info # This can't be a valid named tuple. @@ -130,24 +132,27 @@ def analyze_namedtuple_classdef( def check_namedtuple_classdef( self, defn: ClassDef, is_stub_file: bool - ) -> tuple[list[str], list[Type], dict[str, Expression]] | None: + ) -> tuple[list[str], list[Type], dict[str, Expression], list[Statement]] | None: """Parse and validate fields in named tuple class definition. - Return a three tuple: + Return a four tuple: * field names * field types * field default values + * valid statements or None, if any of the types are not ready. """ if self.options.python_version < (3, 6) and not is_stub_file: self.fail("NamedTuple class syntax is only supported in Python 3.6", defn) - return [], [], {} + return [], [], {}, [] if len(defn.base_type_exprs) > 1: self.fail("NamedTuple should be a single base", defn) items: list[str] = [] types: list[Type] = [] default_items: dict[str, Expression] = {} + statements: list[Statement] = [] for stmt in defn.defs.body: + statements.append(stmt) if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty namedtuples). if isinstance(stmt, PassStmt) or ( @@ -160,9 +165,13 @@ def check_namedtuple_classdef( # And docstrings. 
if isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr): continue + statements.pop() + defn.removed_statements.append(stmt) self.fail(NAMEDTUP_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. + statements.pop() + defn.removed_statements.append(stmt) self.fail(NAMEDTUP_CLASS_ERROR, stmt) else: # Append name and type in this case... @@ -199,7 +208,7 @@ def check_namedtuple_classdef( ) else: default_items[name] = stmt.rvalue - return items, types, default_items + return items, types, default_items, statements def check_namedtuple( self, node: Expression, var_name: str | None, is_func_scope: bool diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index e8be82bd41be..fb45dcc0dfc4 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -283,9 +283,11 @@ def analyze_typeddict_classdef_fields( ): statements.append(stmt) else: + defn.removed_statements.append(stmt) self.fail(TPDICT_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. + defn.removed_statements.append(stmt) self.fail(TPDICT_CLASS_ERROR, stmt) else: name = stmt.lvalues[0].name diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 1bfd820efb21..87ce63e9d543 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -140,6 +140,8 @@ def visit_class_def(self, node: ClassDef) -> None: ] with self.enter_class(node.info): super().visit_class_def(node) + node.defs.body.extend(node.removed_statements) + node.removed_statements = [] TypeState.reset_subtype_caches_for(node.info) # Kill the TypeInfo, since there is none before semantic analysis. node.info = CLASSDEF_NO_INFO diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index 8e0545953bd8..8ae7f6555f9d 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -393,8 +393,6 @@ class X(typing.NamedTuple): [out] main:6: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" -main:7: error: Type cannot be declared in assignment to non-self attribute -main:7: error: "int" has no attribute "x" main:9: error: Non-default NamedTuple fields cannot follow default fields [builtins fixtures/list.pyi] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index a6d8f206fbba..c162f402486a 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -10205,3 +10205,75 @@ C [builtins fixtures/dict.pyi] [out] == + +[case testNamedTupleNestedCrash] +import m +[file m.py] +from typing import NamedTuple + +class NT(NamedTuple): + class C: ... + x: int + y: int + +[file m.py.2] +from typing import NamedTuple + +class NT(NamedTuple): + class C: ... + x: int + y: int +# change +[builtins fixtures/tuple.pyi] +[out] +m.py:4: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" +== +m.py:4: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" + +[case testNamedTupleNestedClassRecheck] +import n +[file n.py] +import m +x: m.NT +[file m.py] +from typing import NamedTuple +from f import A + +class NT(NamedTuple): + class C: ... 
+ x: int + y: A + +[file f.py] +A = int +[file f.py.2] +A = str +[builtins fixtures/tuple.pyi] +[out] +m.py:5: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" +== +m.py:5: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" + +[case testTypedDictNestedClassRecheck] +import n +[file n.py] +import m +x: m.TD +[file m.py] +from typing_extensions import TypedDict +from f import A + +class TD(TypedDict): + class C: ... + x: int + y: A + +[file f.py] +A = int +[file f.py.2] +A = str +[builtins fixtures/dict.pyi] +[out] +m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" +== +m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" From a206096050d87db65aa8fcd3ab3f3d0dc2302036 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 18 Nov 2022 19:22:40 +0000 Subject: [PATCH 059/292] Enable lxml tests on Python 3.11 (#14134) Ref #12840 --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 7fe486387f2f..399785ce4c1c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,7 +7,7 @@ flake8==5.0.4 # must match version in .pre-commit-config.yaml flake8-bugbear==22.9.23 # must match version in .pre-commit-config.yaml flake8-noqa==1.2.9 # must match version in .pre-commit-config.yaml isort[colors]==5.10.1 # must match version in .pre-commit-config.yaml -lxml>=4.4.0; python_version<'3.11' +lxml>=4.9.1 psutil>=4.0 # pytest 6.2.3 does not support Python 3.10 pytest>=6.2.4 From 1cc4a7d38daac2aa641c9355a27820beba3542e1 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 18 Nov 2022 11:23:28 -0800 Subject: [PATCH 060/292] Revert ctypes patch on an ongoing basis (#14129) --- misc/sync-typeshed.py | 1 + 1 file changed, 1 insertion(+) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 878ffaa23bfb..8eeb9be7f4f8 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -187,6 +187,7 @@ def main() -> None: commits_to_cherry_pick = [ "780534b13722b7b0422178c049a1cbbf4ea4255b", # LiteralString reverts "5319fa34a8004c1568bb6f032a07b8b14cc95bed", # sum reverts + "0062994228fb62975c6cef4d2c80d00c7aa1c545", # ctypes reverts ] for commit in commits_to_cherry_pick: subprocess.run(["git", "cherry-pick", commit], check=True) From 05a3f7d8d61bc298809e5363d3a23aa16fe776d2 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 18 Nov 2022 19:37:58 +0000 Subject: [PATCH 061/292] Correctly handle Enum name on Python 3.11 (#14133) Fixes #12483 Fixes https://github.com/python/typeshed/issues/7564 Ref #12841 The fix is straightforward. I can't use a unit test for this because there are some builtins fixtures that don't have tuple, so I can't do version check. 
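
A sketch of the effect (it mirrors the pythoneval test added below; it needs Python 3.11, where `enum.property` exists, and the method body here is only illustrative):

```python
import enum

class E(enum.Enum):
    X = 1
    Y = 2

    @enum.property
    def foo(self) -> int:
        # with this change mypy treats enum.property like builtins.property,
        # so E.X.foo is an int rather than a bound method
        return 0

reveal_type(E.X.name)  # Revealed type is "Literal['X']?"
reveal_type(E.X.foo)   # Revealed type is "builtins.int"
```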
--- mypy/semanal.py | 8 +++++++- test-data/unit/pythoneval.test | 23 +++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 9b2b4ba44cce..538e37c030a9 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1448,7 +1448,13 @@ def visit_decorator(self, dec: Decorator) -> None: dec.var.is_classmethod = True self.check_decorated_function_is_method("classmethod", dec) elif refers_to_fullname( - d, ("builtins.property", "abc.abstractproperty", "functools.cached_property") + d, + ( + "builtins.property", + "abc.abstractproperty", + "functools.cached_property", + "enum.property", + ), ): removed.append(i) dec.func.is_property = True diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index f6336b48ee7b..3f669246bb4e 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1712,3 +1712,26 @@ A = Type[int] | str B: TypeAlias = Type[int] | str [out] m.pyi:5: note: Revealed type is "typing._SpecialForm" + +[case testEnumNameWorkCorrectlyOn311] +# flags: --python-version 3.11 +import enum + +class E(enum.Enum): + X = 1 + Y = 2 + @enum.property + def foo(self) -> int: ... + +e: E +reveal_type(e.name) +reveal_type(e.value) +reveal_type(E.X.name) +reveal_type(e.foo) +reveal_type(E.Y.foo) +[out] +_testEnumNameWorkCorrectlyOn311.py:11: note: Revealed type is "builtins.str" +_testEnumNameWorkCorrectlyOn311.py:12: note: Revealed type is "Union[Literal[1]?, Literal[2]?]" +_testEnumNameWorkCorrectlyOn311.py:13: note: Revealed type is "Literal['X']?" +_testEnumNameWorkCorrectlyOn311.py:14: note: Revealed type is "builtins.int" +_testEnumNameWorkCorrectlyOn311.py:15: note: Revealed type is "builtins.int" From a2477ff0d0cb751f27a2b38d27ce6572ead03451 Mon Sep 17 00:00:00 2001 From: ChristianWitzler <57713653+ChristianWitzler@users.noreply.github.com> Date: Fri, 18 Nov 2022 21:00:08 +0100 Subject: [PATCH 062/292] Update code example in "Declaring decorators" (#14131) - Added missing cast import - Changed revealed type Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- docs/source/generics.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/generics.rst b/docs/source/generics.rst index 59d4aa1a2dea..9a13e2a955c4 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -635,7 +635,7 @@ Before parameter specifications, here's how one might have annotated the decorat .. code-block:: python - from typing import Callable, TypeVar + from typing import Any, Callable, TypeVar, cast F = TypeVar('F', bound=Callable[..., Any]) @@ -650,8 +650,8 @@ and that would enable the following type checks: .. code-block:: python - reveal_type(a) # str - add_forty_two('x') # Type check error: incompatible type "str"; expected "int" + reveal_type(a) # Revealed type is "builtins.int" + add_forty_two('x') # Argument 1 to "add_forty_two" has incompatible type "str"; expected "int" Note that the ``wrapper()`` function is not type-checked. Wrapper From 6cd8e007923acef7a2899e85378bc4822472b848 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 19 Nov 2022 02:00:33 +0000 Subject: [PATCH 063/292] Fix type query for recursive aliases (#14136) See https://github.com/python/mypy/pull/14130 for context. Btw it looks like these `Any` reports are quite broken in general. 
Some issues I found: * Many types are reported twice (even non-recursive) * Explicit `Any` in alias r.h.s are not counted (because of reckless `res = make_any_non_explicit(res)` in semanal.py) * For generic aliases we count their r.h.s. as containing `Any` from omitted generics I tried to fix these things, but it is not trivial, so maybe we can do it later in a separate PR. --- mypy/type_visitor.py | 24 ++++++++---------------- mypy/typeanal.py | 2 +- test-data/unit/reports.test | 24 +++++++++++++++++++++++- 3 files changed, 32 insertions(+), 18 deletions(-) diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index fe404cda0bec..0f5ac05e68ac 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -404,24 +404,16 @@ def visit_placeholder_type(self, t: PlaceholderType) -> T: return self.query_types(t.args) def visit_type_alias_type(self, t: TypeAliasType) -> T: + # Skip type aliases already visited types to avoid infinite recursion. + # TODO: Ideally we should fire subvisitors here (or use caching) if we care + # about duplicates. + if t in self.seen_aliases: + return self.strategy([]) + self.seen_aliases.add(t) if self.skip_alias_target: return self.query_types(t.args) return get_proper_type(t).accept(self) def query_types(self, types: Iterable[Type]) -> T: - """Perform a query for a list of types. - - Use the strategy to combine the results. - Skip type aliases already visited types to avoid infinite recursion. - """ - res: list[T] = [] - for t in types: - if isinstance(t, TypeAliasType): - # Avoid infinite recursion for recursive type aliases. - # TODO: Ideally we should fire subvisitors here (or use caching) if we care - # about duplicates. - if t in self.seen_aliases: - continue - self.seen_aliases.add(t) - res.append(t.accept(self)) - return self.strategy(res) + """Perform a query for a list of types using the strategy to combine the results.""" + return self.strategy([t.accept(self) for t in types]) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 18a63011c5bf..0dc1717d0724 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -450,7 +450,7 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ if fullname == "builtins.None": return NoneType() elif fullname == "typing.Any" or fullname == "builtins.Any": - return AnyType(TypeOfAny.explicit) + return AnyType(TypeOfAny.explicit, line=t.line, column=t.column) elif fullname in FINAL_TYPE_NAMES: self.fail( "Final can be only used as an outermost qualifier in a variable annotation", diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test index a7ab6d754b2c..50dabb1fdea9 100644 --- a/test-data/unit/reports.test +++ b/test-data/unit/reports.test @@ -103,6 +103,28 @@ class A(object): +[case testNoCrashRecursiveAliasInReport] +# cmd: mypy --any-exprs-report report n.py + +[file n.py] +from typing import Union, List, Any, TypeVar + +Nested = List[Union[Any, Nested]] +T = TypeVar("T") +NestedGen = List[Union[T, NestedGen[T]]] + +x: Nested +y: NestedGen[int] +z: NestedGen[Any] + +[file report/any-exprs.txt] +[outfile report/types-of-anys.txt] + Name Unannotated Explicit Unimported Omitted Generics Error Special Form Implementation Artifact +----------------------------------------------------------------------------------------------------------------- + n 0 4 0 8 0 0 0 +----------------------------------------------------------------------------------------------------------------- +Total 0 4 0 8 0 0 0 + [case testTypeVarTreatedAsEmptyLine] # cmd: mypy --html-report report n.py @@ 
-480,7 +502,7 @@ namespace_packages = True
 [HTML report rows for folder.subfolder.something; the surrounding markup is not recoverable in this excerpt]
--- mypy/test/testtypes.py | 7 +++++++ mypy/test/typefixture.py | 10 +++++++--- mypy/types.py | 26 +++++++++++++++++++------- 3 files changed, 33 insertions(+), 10 deletions(-) diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 31bdd6690a7a..18948ee7f6d6 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -31,6 +31,7 @@ UninhabitedType, UnionType, get_proper_type, + has_recursive_types, ) @@ -157,6 +158,12 @@ def test_type_alias_expand_all(self) -> None: [self.fx.a, self.fx.a], Instance(self.fx.std_tuplei, [self.fx.a]) ) + def test_recursive_nested_in_non_recursive(self) -> None: + A, _ = self.fx.def_alias_1(self.fx.a) + NA = self.fx.non_rec_alias(Instance(self.fx.gi, [UnboundType("T")]), ["T"], [A]) + assert not NA.is_recursive + assert has_recursive_types(NA) + def test_indirection_no_infinite_recursion(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) visitor = TypeIndirectionVisitor() diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index 380da909893a..93e5e4b0b5ca 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -339,9 +339,13 @@ def def_alias_2(self, base: Instance) -> tuple[TypeAliasType, Type]: A.alias = AN return A, target - def non_rec_alias(self, target: Type) -> TypeAliasType: - AN = TypeAlias(target, "__main__.A", -1, -1) - return TypeAliasType(AN, []) + def non_rec_alias( + self, target: Type, alias_tvars: list[str] | None = None, args: list[Type] | None = None + ) -> TypeAliasType: + AN = TypeAlias(target, "__main__.A", -1, -1, alias_tvars=alias_tvars) + if args is None: + args = [] + return TypeAliasType(AN, args) class InterfaceTypeFixture(TypeFixture): diff --git a/mypy/types.py b/mypy/types.py index 1de294f9952d..6c08b24afd80 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -278,30 +278,42 @@ def _expand_once(self) -> Type: self.alias.target, self.alias.alias_tvars, self.args, self.line, self.column ) - def _partial_expansion(self) -> tuple[ProperType, bool]: + def _partial_expansion(self, nothing_args: bool = False) -> tuple[ProperType, bool]: # Private method mostly for debugging and testing. unroller = UnrollAliasVisitor(set()) - unrolled = self.accept(unroller) + if nothing_args: + alias = self.copy_modified(args=[UninhabitedType()] * len(self.args)) + else: + alias = self + unrolled = alias.accept(unroller) assert isinstance(unrolled, ProperType) return unrolled, unroller.recursed - def expand_all_if_possible(self) -> ProperType | None: + def expand_all_if_possible(self, nothing_args: bool = False) -> ProperType | None: """Attempt a full expansion of the type alias (including nested aliases). If the expansion is not possible, i.e. the alias is (mutually-)recursive, - return None. + return None. If nothing_args is True, replace all type arguments with an + UninhabitedType() (used to detect recursively defined aliases). """ - unrolled, recursed = self._partial_expansion() + unrolled, recursed = self._partial_expansion(nothing_args=nothing_args) if recursed: return None return unrolled @property def is_recursive(self) -> bool: + """Whether this type alias is recursive. + + Note this doesn't check generic alias arguments, but only if this alias + *definition* is recursive. The property value thus can be cached on the + underlying TypeAlias node. If you want to include all nested types, use + has_recursive_types() function. 
+ """ assert self.alias is not None, "Unfixed type alias" is_recursive = self.alias._is_recursive if is_recursive is None: - is_recursive = self.expand_all_if_possible() is None + is_recursive = self.expand_all_if_possible(nothing_args=True) is None # We cache the value on the underlying TypeAlias node as an optimization, # since the value is the same for all instances of the same alias. self.alias._is_recursive = is_recursive @@ -3259,7 +3271,7 @@ def __init__(self) -> None: super().__init__(any) def visit_type_alias_type(self, t: TypeAliasType) -> bool: - return t.is_recursive + return t.is_recursive or self.query_types(t.args) def has_recursive_types(typ: Type) -> bool: From c660354846688ff8158d0f0178eb298171b74f5b Mon Sep 17 00:00:00 2001 From: Tom Schraitle Date: Mon, 21 Nov 2022 09:00:36 +0100 Subject: [PATCH 067/292] Avoid use of implicit optional in decorator factory docs (#14156) --- docs/source/generics.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/generics.rst b/docs/source/generics.rst index 9a13e2a955c4..a5c7b8accaa8 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -724,7 +724,7 @@ achieved by combining with :py:func:`@overload `: .. code-block:: python - from typing import Any, Callable, TypeVar, overload + from typing import Any, Callable, Optional, TypeVar, overload F = TypeVar('F', bound=Callable[..., Any]) @@ -736,7 +736,7 @@ achieved by combining with :py:func:`@overload `: def atomic(*, savepoint: bool = True) -> Callable[[F], F]: ... # Implementation - def atomic(__func: Callable[..., Any] = None, *, savepoint: bool = True): + def atomic(__func: Optional[Callable[..., Any]] = None, *, savepoint: bool = True): def decorator(func: Callable[..., Any]): ... # Code goes here if __func is not None: From 3c5f368872413fae867bf4a2dff7781cbf547459 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 21 Nov 2022 18:42:13 +0000 Subject: [PATCH 068/292] Allow function arguments as base classes (#14135) Fixes #5865 Looks quite easy and safe, unless I am missing something. Most changes in the diff are just moving stuff around. Previously we only applied argument types before type checking, but it looks like we can totally do this in semantic analyzer. I also enable variable annotated as `type` (or equivalently `Type[Any]`), this use case was mentioned in the comments. This PR also accidentally fixes two additional bugs, one related to type variables with values vs walrus operator, another one for type variables with values vs self types. I include test cases for those as well. 
--- mypy/checker.py | 29 +++-------------------------- mypy/semanal.py | 25 +++++++++++++++++++++---- mypy/stubtest.py | 2 +- mypy/treetransform.py | 2 +- mypy/typeanal.py | 8 ++++++++ mypy/types.py | 28 ++++++++++++++++++++++++++++ test-data/unit/check-classes.test | 15 +++++++++++++++ test-data/unit/check-python38.test | 16 ++++++++++++++++ test-data/unit/check-selftype.test | 13 +++++++++++++ test-data/unit/semanal-types.test | 2 ++ 10 files changed, 108 insertions(+), 32 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index c7de4911501a..7a66a9408ee4 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -184,7 +184,6 @@ LiteralType, NoneType, Overloaded, - ParamSpecType, PartialType, ProperType, StarType, @@ -203,7 +202,6 @@ UnboundType, UninhabitedType, UnionType, - UnpackType, flatten_nested_unions, get_proper_type, get_proper_types, @@ -211,6 +209,7 @@ is_named_instance, is_optional, remove_optional, + store_argument_type, strip_type, ) from mypy.typetraverser import TypeTraverserVisitor @@ -1174,30 +1173,8 @@ def check_func_def( if ctx.line < 0: ctx = typ self.fail(message_registry.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx) - if typ.arg_kinds[i] == nodes.ARG_STAR: - if isinstance(arg_type, ParamSpecType): - pass - elif isinstance(arg_type, UnpackType): - if isinstance(get_proper_type(arg_type.type), TupleType): - # Instead of using Tuple[Unpack[Tuple[...]]], just use - # Tuple[...] - arg_type = arg_type.type - else: - arg_type = TupleType( - [arg_type], - fallback=self.named_generic_type( - "builtins.tuple", [self.named_type("builtins.object")] - ), - ) - else: - # builtins.tuple[T] is typing.Tuple[T, ...] - arg_type = self.named_generic_type("builtins.tuple", [arg_type]) - elif typ.arg_kinds[i] == nodes.ARG_STAR2: - if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: - arg_type = self.named_generic_type( - "builtins.dict", [self.str_type(), arg_type] - ) - item.arguments[i].variable.type = arg_type + # Need to store arguments again for the expanded item. + store_argument_type(item, i, typ, self.named_generic_type) # Type check initialization expressions. body_is_trivial = is_trivial_body(defn.body) diff --git a/mypy/semanal.py b/mypy/semanal.py index 538e37c030a9..a5ddcc70eed6 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -273,6 +273,7 @@ get_proper_types, invalid_recursive_alias, is_named_instance, + store_argument_type, ) from mypy.typevars import fill_typevars from mypy.util import ( @@ -1315,7 +1316,10 @@ def analyze_function_body(self, defn: FuncItem) -> None: # Bind the type variables again to visit the body. 
if defn.type: a = self.type_analyzer() - a.bind_function_type_variables(cast(CallableType, defn.type), defn) + typ = cast(CallableType, defn.type) + a.bind_function_type_variables(typ, defn) + for i in range(len(typ.arg_types)): + store_argument_type(defn, i, typ, self.named_type) self.function_stack.append(defn) with self.enter(defn): for arg in defn.arguments: @@ -2018,7 +2022,9 @@ def analyze_base_classes( continue try: - base = self.expr_to_analyzed_type(base_expr, allow_placeholder=True) + base = self.expr_to_analyzed_type( + base_expr, allow_placeholder=True, allow_type_any=True + ) except TypeTranslationError: name = self.get_name_repr_of_expr(base_expr) if isinstance(base_expr, CallExpr): @@ -6139,7 +6145,11 @@ def accept(self, node: Node) -> None: report_internal_error(err, self.errors.file, node.line, self.errors, self.options) def expr_to_analyzed_type( - self, expr: Expression, report_invalid_types: bool = True, allow_placeholder: bool = False + self, + expr: Expression, + report_invalid_types: bool = True, + allow_placeholder: bool = False, + allow_type_any: bool = False, ) -> Type | None: if isinstance(expr, CallExpr): # This is a legacy syntax intended mostly for Python 2, we keep it for @@ -6164,7 +6174,10 @@ def expr_to_analyzed_type( return TupleType(info.tuple_type.items, fallback=fallback) typ = self.expr_to_unanalyzed_type(expr) return self.anal_type( - typ, report_invalid_types=report_invalid_types, allow_placeholder=allow_placeholder + typ, + report_invalid_types=report_invalid_types, + allow_placeholder=allow_placeholder, + allow_type_any=allow_type_any, ) def analyze_type_expr(self, expr: Expression) -> None: @@ -6188,6 +6201,7 @@ def type_analyzer( allow_param_spec_literals: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, + allow_type_any: bool = False, ) -> TypeAnalyser: if tvar_scope is None: tvar_scope = self.tvar_scope @@ -6204,6 +6218,7 @@ def type_analyzer( allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, prohibit_self_type=prohibit_self_type, + allow_type_any=allow_type_any, ) tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic()) tpan.global_scope = not self.type and not self.function_stack @@ -6224,6 +6239,7 @@ def anal_type( allow_param_spec_literals: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, + allow_type_any: bool = False, third_pass: bool = False, ) -> Type | None: """Semantically analyze a type. 
@@ -6260,6 +6276,7 @@ def anal_type( allow_param_spec_literals=allow_param_spec_literals, report_invalid_types=report_invalid_types, prohibit_self_type=prohibit_self_type, + allow_type_any=allow_type_any, ) tag = self.track_incomplete_refs() typ = typ.accept(a) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 87ccbd3176df..74e57d9e5617 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -354,7 +354,7 @@ def _verify_final( ) -> Iterator[Error]: try: - class SubClass(runtime): # type: ignore[misc,valid-type] + class SubClass(runtime): # type: ignore[misc] pass except TypeError: diff --git a/mypy/treetransform.py b/mypy/treetransform.py index c863db6b3dd5..2f678b89b1e6 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -550,7 +550,7 @@ def visit_super_expr(self, node: SuperExpr) -> SuperExpr: return new def visit_assignment_expr(self, node: AssignmentExpr) -> AssignmentExpr: - return AssignmentExpr(node.target, node.value) + return AssignmentExpr(self.expr(node.target), self.expr(node.value)) def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr: new = UnaryExpr(node.op, self.expr(node.expr)) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 0dc1717d0724..f22fa30706c4 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -201,6 +201,7 @@ def __init__( allow_param_spec_literals: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, + allow_type_any: bool = False, ) -> None: self.api = api self.lookup_qualified = api.lookup_qualified @@ -237,6 +238,8 @@ def __init__( # Names of type aliases encountered while analysing a type will be collected here. self.aliases_used: set[str] = set() self.prohibit_self_type = prohibit_self_type + # Allow variables typed as Type[Any] and type (useful for base classes). + self.allow_type_any = allow_type_any def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) @@ -730,6 +733,11 @@ def analyze_unbound_type_without_type_info( return AnyType( TypeOfAny.from_unimported_type, missing_import_name=typ.missing_import_name ) + elif self.allow_type_any: + if isinstance(typ, Instance) and typ.type.fullname == "builtins.type": + return AnyType(TypeOfAny.special_form) + if isinstance(typ, TypeType) and isinstance(typ.item, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=typ.item) # Option 2: # Unbound type variable. Currently these may be still valid, # for example when defining a generic type alias. diff --git a/mypy/types.py b/mypy/types.py index 6c08b24afd80..78142d9003d9 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -7,6 +7,7 @@ from typing import ( TYPE_CHECKING, Any, + Callable, ClassVar, Dict, Iterable, @@ -29,6 +30,7 @@ ArgKind, FakeInfo, FuncDef, + FuncItem, SymbolNode, ) from mypy.state import state @@ -3402,3 +3404,29 @@ def callable_with_ellipsis(any_type: AnyType, ret_type: Type, fallback: Instance fallback=fallback, is_ellipsis_args=True, ) + + +def store_argument_type( + defn: FuncItem, i: int, typ: CallableType, named_type: Callable[[str, list[Type]], Instance] +) -> None: + arg_type = typ.arg_types[i] + if typ.arg_kinds[i] == ARG_STAR: + if isinstance(arg_type, ParamSpecType): + pass + elif isinstance(arg_type, UnpackType): + if isinstance(get_proper_type(arg_type.type), TupleType): + # Instead of using Tuple[Unpack[Tuple[...]]], just use + # Tuple[...] 
+ arg_type = arg_type.type + else: + arg_type = TupleType( + [arg_type], + fallback=named_type("builtins.tuple", [named_type("builtins.object", [])]), + ) + else: + # builtins.tuple[T] is typing.Tuple[T, ...] + arg_type = named_type("builtins.tuple", [arg_type]) + elif typ.arg_kinds[i] == ARG_STAR2: + if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: + arg_type = named_type("builtins.dict", [named_type("builtins.str", []), arg_type]) + defn.arguments[i].variable.type = arg_type diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 33208c081c28..e3aea122ebe1 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -7664,3 +7664,18 @@ class C(B): def foo(self) -> int: # E: Signature of "foo" incompatible with supertype "B" ... [builtins fixtures/property.pyi] + +[case testAllowArgumentAsBaseClass] +from typing import Any, Type + +def e(b) -> None: + class D(b): ... + +def f(b: Any) -> None: + class D(b): ... + +def g(b: Type[Any]) -> None: + class D(b): ... + +def h(b: type) -> None: + class D(b): ... diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index 1922192c2877..30bdadf900c3 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -718,3 +718,19 @@ def f1() -> None: y = x z = x [builtins fixtures/dict.pyi] + +[case testNarrowOnSelfInGeneric] +# flags: --strict-optional +from typing import Generic, TypeVar, Optional + +T = TypeVar("T", int, str) + +class C(Generic[T]): + x: Optional[T] + def meth(self) -> Optional[T]: + if (y := self.x) is not None: + reveal_type(y) + return None +[out] +main:10: note: Revealed type is "builtins.int" +main:10: note: Revealed type is "builtins.str" diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 494ae54400fb..b002746a3397 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1772,3 +1772,16 @@ class D(C): ... reveal_type(D.f) # N: Revealed type is "def [T] (T`-1) -> T`-1" reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" + +[case testTypingSelfOnSuperTypeVarValues] +from typing import Self, Generic, TypeVar + +T = TypeVar("T", int, str) + +class B: + def copy(self) -> Self: ... +class C(B, Generic[T]): + def copy(self) -> Self: + inst = super().copy() + reveal_type(inst) # N: Revealed type is "Self`0" + return inst diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index d832772f5f81..8dc767e1abfc 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -790,6 +790,7 @@ def f(x: int) -> None: pass def f(*args) -> None: pass x = f +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [overload]) @@ -1032,6 +1033,7 @@ MypyFile:1( [case testVarArgsAndKeywordArgs] def g(*x: int, y: str = ''): pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( FuncDef:1( From b83ac9cff3d38f868e45e4c4b011cbd2fdd37fc3 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 22 Nov 2022 16:28:55 +0000 Subject: [PATCH 069/292] Try empty context when assigning to union typed variables (#14151) Fixes #4805 Fixes #13936 It is known that mypy can overuse outer type context sometimes (especially when it is a union). This prevents a common use case for narrowing types (see issue and test cases). This is a somewhat major semantic change, but I think it should match better what a user would expect. 
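
The practical effect (this mirrors `testOptionalTypeNarrowedByGenericCall` added below): when the right-hand side can be inferred to a proper subtype without the union context, that narrower result is kept.

```python
from typing import Dict, Optional

d: Dict[str, str] = {}

def foo(arg: Optional[str] = None) -> None:
    if arg is None:
        # With the declared Optional[str] as context, d.get("a", "b") used to
        # be inferred as Optional[str]; re-inference in an empty context gives
        # str, which is a proper subtype, so it wins.
        arg = d.get("a", "b")
        reveal_type(arg)  # Revealed type is "builtins.str"
```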
--- mypy/checker.py | 44 ++++++++++++++++ test-data/unit/check-inference-context.test | 57 +++++++++++++++++++++ test-data/unit/check-typeddict.test | 2 +- 3 files changed, 102 insertions(+), 1 deletion(-) diff --git a/mypy/checker.py b/mypy/checker.py index 7a66a9408ee4..b0fde94025b6 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -76,6 +76,7 @@ AssignmentStmt, Block, BreakStmt, + BytesExpr, CallExpr, ClassDef, ComparisonExpr, @@ -86,6 +87,7 @@ EllipsisExpr, Expression, ExpressionStmt, + FloatExpr, ForStmt, FuncBase, FuncDef, @@ -115,6 +117,7 @@ ReturnStmt, StarExpr, Statement, + StrExpr, SymbolNode, SymbolTable, SymbolTableNode, @@ -3826,6 +3829,23 @@ def inference_error_fallback_type(self, type: Type) -> Type: # we therefore need to erase them. return erase_typevars(fallback) + def simple_rvalue(self, rvalue: Expression) -> bool: + """Returns True for expressions for which inferred type should not depend on context. + + Note that this function can still return False for some expressions where inferred type + does not depend on context. It only exists for performance optimizations. + """ + if isinstance(rvalue, (IntExpr, StrExpr, BytesExpr, FloatExpr, RefExpr)): + return True + if isinstance(rvalue, CallExpr): + if isinstance(rvalue.callee, RefExpr) and isinstance(rvalue.callee.node, FuncBase): + typ = rvalue.callee.node.type + if isinstance(typ, CallableType): + return not typ.variables + elif isinstance(typ, Overloaded): + return not any(item.variables for item in typ.items) + return False + def check_simple_assignment( self, lvalue_type: Type | None, @@ -3847,6 +3867,30 @@ def check_simple_assignment( rvalue_type = self.expr_checker.accept( rvalue, lvalue_type, always_allow_any=always_allow_any ) + if ( + isinstance(get_proper_type(lvalue_type), UnionType) + # Skip literal types, as they have special logic (for better errors). + and not isinstance(get_proper_type(rvalue_type), LiteralType) + and not self.simple_rvalue(rvalue) + ): + # Try re-inferring r.h.s. in empty context, and use that if it + # results in a narrower type. We don't do this always because this + # may cause some perf impact, plus we want to partially preserve + # the old behavior. This helps with various practical examples, see + # e.g. testOptionalTypeNarrowedByGenericCall. + with self.msg.filter_errors() as local_errors, self.local_type_map() as type_map: + alt_rvalue_type = self.expr_checker.accept( + rvalue, None, always_allow_any=always_allow_any + ) + if ( + not local_errors.has_new_errors() + # Skip Any type, since it is special cased in binder. + and not isinstance(get_proper_type(alt_rvalue_type), AnyType) + and is_valid_inferred_type(alt_rvalue_type) + and is_proper_subtype(alt_rvalue_type, rvalue_type) + ): + rvalue_type = alt_rvalue_type + self.store_types(type_map) if isinstance(rvalue_type, DeletedType): self.msg.deleted_as_rvalue(rvalue_type, context) if isinstance(lvalue_type, DeletedType): diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index 2e26f54c6e93..f80f93eb2615 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -1419,3 +1419,60 @@ def bar(x: Union[Mapping[Any, Any], Dict[Any, Sequence[Any]]]) -> None: ... 
bar({1: 2}) [builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall] +# flags: --strict-optional +from typing import Dict, Optional + +d: Dict[str, str] = {} + +def foo(arg: Optional[str] = None) -> None: + if arg is None: + arg = d.get("a", "b") + reveal_type(arg) # N: Revealed type is "builtins.str" +[builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall2] +# flags: --strict-optional +from typing import Dict, Optional + +d: Dict[str, str] = {} +x: Optional[str] +if x: + reveal_type(x) # N: Revealed type is "builtins.str" + x = d.get(x, x) + reveal_type(x) # N: Revealed type is "builtins.str" +[builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall3] +# flags: --strict-optional +from typing import Generic, TypeVar, Union + +T = TypeVar("T") +def bar(arg: Union[str, T]) -> Union[str, T]: ... + +def foo(arg: Union[str, int]) -> None: + if isinstance(arg, int): + arg = bar("default") + reveal_type(arg) # N: Revealed type is "builtins.str" +[builtins fixtures/isinstance.pyi] + +[case testOptionalTypeNarrowedByGenericCall4] +# flags: --strict-optional +from typing import Optional, List, Generic, TypeVar + +T = TypeVar("T", covariant=True) +class C(Generic[T]): ... + +x: Optional[C[int]] = None +y = x = C() +reveal_type(y) # N: Revealed type is "__main__.C[builtins.int]" + +[case testOptionalTypeNarrowedByGenericCall5] +from typing import Any, Tuple, Union + +i: Union[Tuple[Any, ...], int] +b: Any +i = i if isinstance(i, int) else b +reveal_type(i) # N: Revealed type is "Union[Any, builtins.int]" +[builtins fixtures/isinstance.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 24521062a5d4..fbef6157087c 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -893,7 +893,7 @@ B = TypedDict('B', {'@type': Literal['b-type'], 'b': int}) c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} reveal_type(c) # N: Revealed type is "Union[TypedDict('__main__.A', {'@type': Literal['a-type'], 'a': builtins.str}), TypedDict('__main__.B', {'@type': Literal['b-type'], 'b': builtins.int})]" -[builtins fixtures/tuple.pyi] +[builtins fixtures/dict.pyi] [case testTypedDictUnionAmbiguousCase] from typing import Union, Mapping, Any, cast From 8fb482ff72f94b1f16e6c63746d4cb9cd111c76c Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Tue, 22 Nov 2022 12:55:55 -0600 Subject: [PATCH 070/292] [partially defined] handle unreachable blocks (#14161) This adds support for unreachable blocks in `partially-defined` check. Currently, this only supports blocks that are detected as unreachable during semantic analysis (so mostly stuff like python version, etc.). This doesn't support more advanced cases (see #13926 for an example what's not covered). 
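
For example (essentially the new `testUnreachable` case): branches the semantic analyzer already marks as unreachable are skipped, so they no longer make a name look partially defined.

```python
import typing

if typing.TYPE_CHECKING:
    x = 1
elif int():
    y = 1
else:
    y = 2

a = x  # OK: the elif/else bodies are unreachable during type checking,
       # so only the TYPE_CHECKING branch counts for "x"
```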
Closes #13929 --- mypy/partially_defined.py | 9 +++++++- test-data/unit/check-partially-defined.test | 19 ++++++++++++++++ test-data/unit/check-python310.test | 24 +++++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 7d87315c23ad..5854036c0df3 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -203,9 +203,13 @@ def visit_if_stmt(self, o: IfStmt) -> None: e.accept(self) self.tracker.start_branch_statement() for b in o.body: + if b.is_unreachable: + continue b.accept(self) self.tracker.next_branch() if o.else_body: + if o.else_body.is_unreachable: + self.tracker.skip_branch() o.else_body.accept(self) self.tracker.end_branch_statement() @@ -218,7 +222,10 @@ def visit_match_stmt(self, o: MatchStmt) -> None: guard = o.guards[i] if guard is not None: guard.accept(self) - o.bodies[i].accept(self) + if not o.bodies[i].is_unreachable: + o.bodies[i].accept(self) + else: + self.tracker.skip_branch() is_catchall = infer_pattern_value(pattern) == ALWAYS_TRUE if not is_catchall: self.tracker.next_branch() diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index d456568c1131..f6934fb142d1 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -367,3 +367,22 @@ def f() -> None: d = a d = b [builtins fixtures/tuple.pyi] + +[case testUnreachable] +# flags: --enable-error-code partially-defined +import typing + +if typing.TYPE_CHECKING: + x = 1 +elif int(): + y = 1 +else: + y = 2 +a = x + +if not typing.TYPE_CHECKING: + pass +else: + z = 1 +a = z +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 3b90a910e943..1967e7f4810b 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1789,6 +1789,30 @@ def f6(a: object) -> None: pass [builtins fixtures/tuple.pyi] +[case testPartiallyDefinedMatchUnreachable] +# flags: --enable-error-code partially-defined +import typing + +def f0(x: int) -> int: + match x: + case 1 if not typing.TYPE_CHECKING: + pass + case 2: + y = 2 + case _: + y = 3 + return y # No error. + +def f1(x: int) -> int: + match x: + case 1 if not typing.TYPE_CHECKING: + pass + case 2: + y = 2 + return y # E: Name "y" may be undefined + +[typing fixtures/typing-medium.pyi] + [case testTypeAliasWithNewUnionSyntaxAndNoneLeftOperand] from typing import overload class C: From f656efc07c3d1e167d2d6873ffbfe254aa7225ff Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Tue, 22 Nov 2022 15:57:02 -0600 Subject: [PATCH 071/292] Detect variables that are used before they're defined (#14163) This implements a check when a variable is defined before use. Something like: ```python def foo() -> None: x = y y: int = 1 ``` I've combined this check with the partially defined check but added a separate error code. It's probably worth cleaning it up and making the separation between the two checks a bit more clear. I can do this in a follow-up PR. 
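
Besides the straight-line case above, the check (enabled with `--enable-error-code use-before-def`) also flags a name that is only assigned in a different branch, mirroring the new `testUseBeforeDef` cases:

```python
def f1() -> None:
    if int():
        x = 0
    else:
        y = x  # error: Name "x" is used before definition
        z = x  # error: Name "x" is used before definition
```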
Fixes #14124 --- mypy/build.py | 4 +- mypy/errorcodes.py | 6 + mypy/messages.py | 3 + mypy/partially_defined.py | 129 +++++++++++++++----- test-data/unit/check-partially-defined.test | 46 +++++++ 5 files changed, 158 insertions(+), 30 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 62367c35915e..ba54c81845e0 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2341,7 +2341,9 @@ def type_check_second_pass(self) -> bool: def detect_partially_defined_vars(self, type_map: dict[Expression, Type]) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" manager = self.manager - if manager.errors.is_error_code_enabled(codes.PARTIALLY_DEFINED): + if manager.errors.is_error_code_enabled( + codes.PARTIALLY_DEFINED + ) or manager.errors.is_error_code_enabled(codes.USE_BEFORE_DEF): manager.errors.set_file(self.xpath, self.tree.fullname, options=manager.options) self.tree.accept( PartiallyDefinedVariableVisitor( diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index e1efc10b7a8b..1c15407a955b 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -192,6 +192,12 @@ def __str__(self) -> str: "General", default_enabled=False, ) +USE_BEFORE_DEF: Final[ErrorCode] = ErrorCode( + "use-before-def", + "Warn about variables that are used before they are defined", + "General", + default_enabled=False, +) # Syntax errors are often blocking. diff --git a/mypy/messages.py b/mypy/messages.py index 2f487972d647..85fa30512534 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1231,6 +1231,9 @@ def undefined_in_superclass(self, member: str, context: Context) -> None: def variable_may_be_undefined(self, name: str, context: Context) -> None: self.fail(f'Name "{name}" may be undefined', context, code=codes.PARTIALLY_DEFINED) + def var_used_before_def(self, name: str, context: Context) -> None: + self.fail(f'Name "{name}" is used before definition', context, code=codes.USE_BEFORE_DEF) + def first_argument_for_super_must_be_type(self, actual: Type, context: Context) -> None: actual = get_proper_type(actual) if isinstance(actual, Instance): diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 5854036c0df3..70a454beae9c 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -1,6 +1,6 @@ from __future__ import annotations -from mypy import checker +from mypy import checker, errorcodes from mypy.messages import MessageBuilder from mypy.nodes import ( AssertStmt, @@ -93,10 +93,24 @@ def skip_branch(self) -> None: assert len(self.branches) > 0 self.branches[-1].skipped = True - def is_possibly_undefined(self, name: str) -> bool: + def is_partially_defined(self, name: str) -> bool: assert len(self.branches) > 0 return name in self.branches[-1].may_be_defined + def is_undefined(self, name: str) -> bool: + assert len(self.branches) > 0 + branch = self.branches[-1] + return name not in branch.may_be_defined and name not in branch.must_be_defined + + def is_defined_in_different_branch(self, name: str) -> bool: + assert len(self.branches) > 0 + if not self.is_undefined(name): + return False + for b in self.branches[: len(self.branches) - 1]: + if name in b.must_be_defined or name in b.may_be_defined: + return True + return False + def done(self) -> BranchState: branches = [b for b in self.branches if not b.skipped] if len(branches) == 0: @@ -117,62 +131,102 @@ def done(self) -> BranchState: return BranchState(may_be_defined=may_be_defined, must_be_defined=must_be_defined) +class Scope: + def __init__(self, stmts: 
list[BranchStatement]) -> None: + self.branch_stmts: list[BranchStatement] = stmts + self.undefined_refs: dict[str, set[NameExpr]] = {} + + def record_undefined_ref(self, o: NameExpr) -> None: + if o.name not in self.undefined_refs: + self.undefined_refs[o.name] = set() + self.undefined_refs[o.name].add(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + return self.undefined_refs.pop(name, set()) + + class DefinedVariableTracker: """DefinedVariableTracker manages the state and scope for the UndefinedVariablesVisitor.""" def __init__(self) -> None: # There's always at least one scope. Within each scope, there's at least one "global" BranchingStatement. - self.scopes: list[list[BranchStatement]] = [[BranchStatement(BranchState())]] + self.scopes: list[Scope] = [Scope([BranchStatement(BranchState())])] - def _scope(self) -> list[BranchStatement]: + def _scope(self) -> Scope: assert len(self.scopes) > 0 return self.scopes[-1] def enter_scope(self) -> None: - assert len(self._scope()) > 0 - self.scopes.append([BranchStatement(self._scope()[-1].branches[-1])]) + assert len(self._scope().branch_stmts) > 0 + self.scopes.append(Scope([BranchStatement(self._scope().branch_stmts[-1].branches[-1])])) def exit_scope(self) -> None: self.scopes.pop() def start_branch_statement(self) -> None: - assert len(self._scope()) > 0 - self._scope().append(BranchStatement(self._scope()[-1].branches[-1])) + assert len(self._scope().branch_stmts) > 0 + self._scope().branch_stmts.append( + BranchStatement(self._scope().branch_stmts[-1].branches[-1]) + ) def next_branch(self) -> None: - assert len(self._scope()) > 1 - self._scope()[-1].next_branch() + assert len(self._scope().branch_stmts) > 1 + self._scope().branch_stmts[-1].next_branch() def end_branch_statement(self) -> None: - assert len(self._scope()) > 1 - result = self._scope().pop().done() - self._scope()[-1].record_nested_branch(result) + assert len(self._scope().branch_stmts) > 1 + result = self._scope().branch_stmts.pop().done() + self._scope().branch_stmts[-1].record_nested_branch(result) def skip_branch(self) -> None: # Only skip branch if we're outside of "root" branch statement. - if len(self._scope()) > 1: - self._scope()[-1].skip_branch() + if len(self._scope().branch_stmts) > 1: + self._scope().branch_stmts[-1].skip_branch() - def record_declaration(self, name: str) -> None: + def record_definition(self, name: str) -> None: + assert len(self.scopes) > 0 + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].record_definition(name) + + def record_undefined_ref(self, o: NameExpr) -> None: + """Records an undefined reference. These can later be retrieved via `pop_undefined_ref`.""" + assert len(self.scopes) > 0 + self._scope().record_undefined_ref(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + """If name has previously been reported as undefined, the NameExpr that was called will be returned.""" assert len(self.scopes) > 0 - assert len(self.scopes[-1]) > 0 - self._scope()[-1].record_definition(name) + return self._scope().pop_undefined_ref(name) - def is_possibly_undefined(self, name: str) -> bool: - assert len(self._scope()) > 0 + def is_partially_defined(self, name: str) -> bool: + assert len(self._scope().branch_stmts) > 0 # A variable is undefined if it's in a set of `may_be_defined` but not in `must_be_defined`. - # Cases where a variable is not defined altogether are handled by semantic analyzer. 
- return self._scope()[-1].is_possibly_undefined(name) + return self._scope().branch_stmts[-1].is_partially_defined(name) + + def is_defined_in_different_branch(self, name: str) -> bool: + """This will return true if a variable is defined in a branch that's not the current branch.""" + assert len(self._scope().branch_stmts) > 0 + return self._scope().branch_stmts[-1].is_defined_in_different_branch(name) + + def is_undefined(self, name: str) -> bool: + assert len(self._scope().branch_stmts) > 0 + return self._scope().branch_stmts[-1].is_undefined(name) class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): - """Detect variables that are defined only part of the time. + """Detects the following cases: + - A variable that's defined only part of the time. + - If a variable is used before definition - This visitor detects the following case: + An example of a partial definition: if foo(): x = 1 print(x) # Error: "x" may be undefined. + Example of a use before definition: + x = y + y: int = 2 + Note that this code does not detect variables not defined in any of the branches -- that is handled by the semantic analyzer. """ @@ -184,7 +238,11 @@ def __init__(self, msg: MessageBuilder, type_map: dict[Expression, Type]) -> Non def process_lvalue(self, lvalue: Lvalue | None) -> None: if isinstance(lvalue, NameExpr): - self.tracker.record_declaration(lvalue.name) + # Was this name previously used? If yes, it's a use-before-definition error. + refs = self.tracker.pop_undefined_ref(lvalue.name) + for ref in refs: + self.msg.var_used_before_def(lvalue.name, ref) + self.tracker.record_definition(lvalue.name) elif isinstance(lvalue, (ListExpr, TupleExpr)): for item in lvalue.items: self.process_lvalue(item) @@ -239,7 +297,7 @@ def visit_func_def(self, o: FuncDef) -> None: def visit_func(self, o: FuncItem) -> None: if o.arguments is not None: for arg in o.arguments: - self.tracker.record_declaration(arg.variable.name) + self.tracker.record_definition(arg.variable.name) super().visit_func(o) def visit_generator_expr(self, o: GeneratorExpr) -> None: @@ -314,10 +372,23 @@ def visit_starred_pattern(self, o: StarredPattern) -> None: super().visit_starred_pattern(o) def visit_name_expr(self, o: NameExpr) -> None: - if self.tracker.is_possibly_undefined(o.name): - self.msg.variable_may_be_undefined(o.name, o) + if self.tracker.is_partially_defined(o.name): + # A variable is only defined in some branches. + if self.msg.errors.is_error_code_enabled(errorcodes.PARTIALLY_DEFINED): + self.msg.variable_may_be_undefined(o.name, o) # We don't want to report the error on the same variable multiple times. - self.tracker.record_declaration(o.name) + self.tracker.record_definition(o.name) + elif self.tracker.is_defined_in_different_branch(o.name): + # A variable is defined in one branch but used in a different branch. + self.msg.var_used_before_def(o.name, o) + elif self.tracker.is_undefined(o.name): + # A variable is undefined. It could be due to two things: + # 1. A variable is just totally undefined + # 2. The variable is defined later in the code. + # Case (1) will be caught by semantic analyzer. Case (2) is a forward ref that should + # be caught by this visitor. Save the ref for later, so that if we see a definition, + # we know it's a use-before-definition scenario. 
+ self.tracker.record_undefined_ref(o) super().visit_name_expr(o) def visit_with_stmt(self, o: WithStmt) -> None: diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index f6934fb142d1..c63023aa2746 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -386,3 +386,49 @@ else: z = 1 a = z [typing fixtures/typing-medium.pyi] + +[case testUseBeforeDef] +# flags: --enable-error-code use-before-def + +def f0() -> None: + x = y # E: Name "y" is used before definition + y: int = 1 + +def f1() -> None: + if int(): + x = 0 + else: + y = x # E: Name "x" is used before definition + z = x # E: Name "x" is used before definition + +def f2() -> None: + x = 1 + if int(): + x = 0 + else: + y = x # No error. + +def f3() -> None: + if int(): + pass + else: + # No use-before-def error. + y = z # E: Name "z" is not defined + + def inner2() -> None: + z = 0 + +def f4() -> None: + if int(): + pass + else: + y = z # E: Name "z" is used before definition + z: int = 2 + +def f5() -> None: + if int(): + pass + else: + y = z # E: Name "z" is used before definition + x = z # E: Name "z" is used before definition + z: int = 2 From acbc40c50e0236a52bdd35583688fc391db1a410 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 23 Nov 2022 10:48:16 +0000 Subject: [PATCH 072/292] Don't install lxml on Windows on Python 3.11 (#14170) Windows wheels for Python 3.11 are not included in the release, and `cibuildwheels` cannot build them, this causes mypy wheel build failure on Windows for Python 3.11, see e.g. https://github.com/mypyc/mypy_mypyc-wheels/actions/runs/3527396237/jobs/5916408981 We need to do this until https://github.com/lxml/lxml/pull/360 is released (hopefully in next few weeks). cc @hauntsaninja @JukkaL --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 399785ce4c1c..6f0c1b065ad4 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,7 +7,7 @@ flake8==5.0.4 # must match version in .pre-commit-config.yaml flake8-bugbear==22.9.23 # must match version in .pre-commit-config.yaml flake8-noqa==1.2.9 # must match version in .pre-commit-config.yaml isort[colors]==5.10.1 # must match version in .pre-commit-config.yaml -lxml>=4.9.1 +lxml>=4.9.1; python_version<'3.11' or sys_platform!='win32' psutil>=4.0 # pytest 6.2.3 does not support Python 3.10 pytest>=6.2.4 From 07139ef8121fda39906e7a804ef599e61413c0c7 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 23 Nov 2022 14:29:52 +0000 Subject: [PATCH 073/292] Simplify callable overlap logic (#14174) This gives around 1% speed-up on self check, and may give even more for code that is heavy on overloads. --- mypy/meet.py | 19 +++++++------------ mypy/subtypes.py | 2 +- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/mypy/meet.py b/mypy/meet.py index f5cd4c1208da..5c187eeb37d4 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -438,18 +438,13 @@ def _type_object_overlap(left: Type, right: Type) -> bool: return _type_object_overlap(left, right) or _type_object_overlap(right, left) if isinstance(left, CallableType) and isinstance(right, CallableType): - - def _callable_overlap(left: CallableType, right: CallableType) -> bool: - return is_callable_compatible( - left, - right, - is_compat=_is_overlapping_types, - ignore_pos_arg_names=True, - allow_partial_overlap=True, - ) - - # Compare both directions to handle type objects. 
- return _callable_overlap(left, right) or _callable_overlap(right, left) + return is_callable_compatible( + left, + right, + is_compat=_is_overlapping_types, + ignore_pos_arg_names=True, + allow_partial_overlap=True, + ) elif isinstance(left, CallableType): left = left.fallback elif isinstance(right, CallableType): diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 14109587191c..a4b045cfa00c 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1361,7 +1361,7 @@ def g(x: int) -> int: ... ignore_pos_arg_names = True # Non-type cannot be a subtype of type. - if right.is_type_obj() and not left.is_type_obj(): + if right.is_type_obj() and not left.is_type_obj() and not allow_partial_overlap: return False # A callable L is a subtype of a generic callable R if L is a From 04d44c12cfbb3a469a96253d4656d16c21be41b9 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 23 Nov 2022 14:31:29 +0000 Subject: [PATCH 074/292] Add intenal flag for per-line type checking peformance (#14173) This should help with the investigation of tricky performance regressions like https://github.com/python/mypy/issues/13821. I tried to implement in such a way that it will give minimal impact when not used (since I am touching a hot method). --- mypy/build.py | 30 ++++++++++++++++++++++++------ mypy/checker.py | 5 ++++- mypy/checkexpr.py | 29 ++++++++++++++++++++++++++--- mypy/main.py | 8 +++++++- mypy/options.py | 1 + mypy/util.py | 8 +++----- 6 files changed, 65 insertions(+), 16 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index ba54c81845e0..b32276dd3020 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -12,6 +12,7 @@ from __future__ import annotations +import collections import contextlib import errno import gc @@ -278,6 +279,8 @@ def _build( TypeState.reset_all_subtype_caches() if options.timing_stats is not None: dump_timing_stats(options.timing_stats, graph) + if options.line_checking_stats is not None: + dump_line_checking_stats(options.line_checking_stats, graph) return BuildResult(manager, graph) finally: t0 = time.time() @@ -1889,6 +1892,10 @@ class State: # Cumulative time spent on this file, in microseconds (for profiling stats) time_spent_us: int = 0 + # Per-line type-checking time (cumulative time spent type-checking expressions + # on a given source code line). 
+ per_line_checking_time_ns: dict[int, int] + def __init__( self, id: str | None, @@ -1956,6 +1963,7 @@ def __init__( source = "" self.source = source self.add_ancestors() + self.per_line_checking_time_ns = collections.defaultdict(int) t0 = time.time() self.meta = validate_meta(self.meta, self.id, self.path, self.ignore_all, manager) self.manager.add_stats(validate_meta_time=time.time() - t0) @@ -2320,6 +2328,7 @@ def type_checker(self) -> TypeChecker: self.tree, self.xpath, manager.plugin, + self.per_line_checking_time_ns, ) return self._type_checker @@ -2945,13 +2954,22 @@ def dumps(self) -> str: def dump_timing_stats(path: str, graph: Graph) -> None: - """ - Dump timing stats for each file in the given graph - """ + """Dump timing stats for each file in the given graph.""" with open(path, "w") as f: - for k in sorted(graph.keys()): - v = graph[k] - f.write(f"{v.id} {v.time_spent_us}\n") + for id in sorted(graph): + f.write(f"{id} {graph[id].time_spent_us}\n") + + +def dump_line_checking_stats(path: str, graph: Graph) -> None: + """Dump per-line expression type checking stats.""" + with open(path, "w") as f: + for id in sorted(graph): + if not graph[id].per_line_checking_time_ns: + continue + f.write(f"{id}:\n") + for line in sorted(graph[id].per_line_checking_time_ns): + line_time = graph[id].per_line_checking_time_ns[line] + f.write(f"{line:>5} {line_time/1000:8.1f}\n") def dump_graph(graph: Graph, stdout: TextIO | None = None) -> None: diff --git a/mypy/checker.py b/mypy/checker.py index b0fde94025b6..431fde299dc0 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -364,6 +364,7 @@ def __init__( tree: MypyFile, path: str, plugin: Plugin, + per_line_checking_time_ns: dict[int, int], ) -> None: """Construct a type checker. @@ -376,7 +377,9 @@ def __init__( self.path = path self.msg = MessageBuilder(errors, modules) self.plugin = plugin - self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg, self.plugin) + self.expr_checker = mypy.checkexpr.ExpressionChecker( + self, self.msg, self.plugin, per_line_checking_time_ns + ) self.pattern_checker = PatternChecker(self, self.msg, self.plugin) self.tscope = Scope() self.scope = CheckerScope(tree) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index b41a38825fb3..78ae412072f5 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3,6 +3,7 @@ from __future__ import annotations import itertools +import time from contextlib import contextmanager from typing import Callable, ClassVar, Iterator, List, Optional, Sequence, cast from typing_extensions import Final, TypeAlias as _TypeAlias, overload @@ -263,11 +264,22 @@ class ExpressionChecker(ExpressionVisitor[Type]): strfrm_checker: StringFormatterChecker plugin: Plugin - def __init__(self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: Plugin) -> None: + def __init__( + self, + chk: mypy.checker.TypeChecker, + msg: MessageBuilder, + plugin: Plugin, + per_line_checking_time_ns: dict[int, int], + ) -> None: """Construct an expression type checker.""" self.chk = chk self.msg = msg self.plugin = plugin + self.per_line_checking_time_ns = per_line_checking_time_ns + self.collect_line_checking_stats = self.chk.options.line_checking_stats is not None + # Are we already visiting some expression? This is used to avoid double counting + # time for nested expressions. + self.in_expression = False self.type_context = [None] # Temporary overrides for expression types. This is currently @@ -4727,7 +4739,14 @@ def accept( applies only to this expression and not any subexpressions. 
""" if node in self.type_overrides: + # This branch is very fast, there is no point timing it. return self.type_overrides[node] + # We don't use context manager here to get most precise data (and avoid overhead). + record_time = False + if self.collect_line_checking_stats and not self.in_expression: + t0 = time.perf_counter_ns() + self.in_expression = True + record_time = True self.type_context.append(type_context) old_is_callee = self.is_callee self.is_callee = is_callee @@ -4762,9 +4781,13 @@ def accept( self.msg.disallowed_any_type(typ, node) if not self.chk.in_checked_function() or self.chk.current_node_deferred: - return AnyType(TypeOfAny.unannotated) + result: Type = AnyType(TypeOfAny.unannotated) else: - return typ + result = typ + if record_time: + self.per_line_checking_time_ns[node.line] += time.perf_counter_ns() - t0 + self.in_expression = False + return result def named_type(self, name: str) -> Instance: """Return an instance type with type given by the name and no type diff --git a/mypy/main.py b/mypy/main.py index 405596c20991..d0cb6ca4d505 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -1082,8 +1082,14 @@ def add_invertible_flag( "--inferstats", action="store_true", dest="dump_inference_stats", help=argparse.SUPPRESS ) parser.add_argument("--dump-build-stats", action="store_true", help=argparse.SUPPRESS) - # dump timing stats for each processed file into the given output file + # Dump timing stats for each processed file into the given output file parser.add_argument("--timing-stats", dest="timing_stats", help=argparse.SUPPRESS) + # Dump per line type checking timing stats for each processed file into the given + # output file. Only total time spent in each top level expression will be shown. + # Times are show in microseconds. + parser.add_argument( + "--line-checking-stats", dest="line_checking_stats", help=argparse.SUPPRESS + ) # --debug-cache will disable any cache-related compressions/optimizations, # which will make the cache writing process output pretty-printed JSON (which # is easier to debug). 
diff --git a/mypy/options.py b/mypy/options.py index 3a08ff9455ee..ffb6b201e70b 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -283,6 +283,7 @@ def __init__(self) -> None: self.enable_incomplete_features = False # deprecated self.enable_incomplete_feature: list[str] = [] self.timing_stats: str | None = None + self.line_checking_stats: str | None = None # -- test options -- # Stop after the semantic analysis phase diff --git a/mypy/util.py b/mypy/util.py index 04ed616ade07..cced4db34fc9 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -807,13 +807,11 @@ def unnamed_function(name: str | None) -> bool: return name is not None and name == "_" -# TODO: replace with uses of perf_counter_ns when support for py3.6 is dropped -# (or when mypy properly handles alternate definitions based on python version check -time_ref = time.perf_counter +time_ref = time.perf_counter_ns -def time_spent_us(t0: float) -> int: - return int((time.perf_counter() - t0) * 1e6) +def time_spent_us(t0: int) -> int: + return int((time.perf_counter_ns() - t0) / 1000) def plural_s(s: int | Sized) -> str: From 13bd201586e837962239dbdca8eda1f9966ebfc2 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 24 Nov 2022 11:22:10 +0000 Subject: [PATCH 075/292] Support ParamSpec variables in type aliases (#14159) Fixes #11855 Fixes #7084 Fixes #10445 Should fix #4987 After thinking about this for some time, it looks like the best way to implement this is by switching type aliases from unbound to bound type variables. Then I can essentially simply share (or copy in one small place, to avoid cyclic imports) all the logic that currently exists for `ParamSpec` and `Concatenate` in `expand_type()` etc. This will also address a big piece of tech debt, and will get some benefits (almost) for free, such as checking bounds/values for alias type variables, and much tighter handling of unbound type variables. Note that in this PR I change logic for emitting some errors, I try to avoid showing multiple errors for the same location/reason. But this is not an essential part of this PR (it is just some test cases would otherwise fail with even more error messages), I can reconsider if there are objections. --- docs/source/generics.rst | 8 +- mypy/checker.py | 2 +- mypy/checkexpr.py | 6 +- mypy/erasetype.py | 4 +- mypy/expandtype.py | 34 +--- mypy/fixup.py | 2 + mypy/mixedtraverser.py | 5 + mypy/nodes.py | 31 ++-- mypy/semanal.py | 94 +++++++---- mypy/semanal_typeargs.py | 90 ++++++++--- mypy/semanal_typeddict.py | 4 +- mypy/server/astdiff.py | 2 +- mypy/server/astmerge.py | 2 + mypy/subtypes.py | 22 ++- mypy/test/testtypes.py | 3 +- mypy/test/typefixture.py | 5 +- mypy/typeanal.py | 135 +++++++++------- mypy/types.py | 83 ++++++++-- mypy/typetraverser.py | 3 + test-data/unit/check-generics.test | 30 +++- test-data/unit/check-isinstance.test | 5 - test-data/unit/check-literal.test | 25 +-- .../unit/check-parameter-specification.test | 152 +++++++++++++++++- test-data/unit/check-type-aliases.test | 35 ++++ 24 files changed, 554 insertions(+), 228 deletions(-) diff --git a/docs/source/generics.rst b/docs/source/generics.rst index a5c7b8accaa8..a867bc863c83 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -916,9 +916,5 @@ defeating the purpose of using aliases. Example: OIntVec = Optional[Vec[int]] -.. note:: - - A type alias does not define a new type. For generic type aliases - this means that variance of type variables used for alias definition does not - apply to aliases. 
A parameterized generic alias is treated simply as an original - type with the corresponding type variables substituted. +Using type variable bounds or values in generic aliases, has the same effect +as in generic classes/functions. diff --git a/mypy/checker.py b/mypy/checker.py index 431fde299dc0..f9acc9766140 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -7115,7 +7115,7 @@ def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Target of the alias cannot by an ambiguous , so we just + # Target of the alias cannot be an ambiguous , so we just # replace the arguments. return t.copy_modified(args=[a.accept(self) for a in t.args]) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 78ae412072f5..362ef1eeb7f8 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3854,10 +3854,8 @@ def visit_type_application(self, tapp: TypeApplication) -> Type: There are two different options here, depending on whether expr refers to a type alias or directly to a generic class. In the first case we need - to use a dedicated function typeanal.expand_type_aliases. This - is due to the fact that currently type aliases machinery uses - unbound type variables, while normal generics use bound ones; - see TypeAlias docstring for more details. + to use a dedicated function typeanal.expand_type_alias(). This + is due to some differences in how type arguments are applied and checked. """ if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias): # Subscription of a (generic) alias in runtime context, expand the alias. diff --git a/mypy/erasetype.py b/mypy/erasetype.py index 89c07186f44a..6533d0c4e0f9 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -176,8 +176,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Type alias target can't contain bound type variables, so - # it is safe to just erase the arguments. + # Type alias target can't contain bound type variables (not bound by the type + # alias itself), so it is safe to just erase the arguments. return t.copy_modified(args=[a.accept(self) for a in t.args]) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 43f4e6bcd75b..d3286480e316 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -15,7 +15,6 @@ NoneType, Overloaded, Parameters, - ParamSpecFlavor, ParamSpecType, PartialType, ProperType, @@ -34,6 +33,7 @@ UninhabitedType, UnionType, UnpackType, + expand_param_spec, get_proper_type, ) from mypy.typevartuples import ( @@ -212,32 +212,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: # TODO: what does prefix mean in this case? # TODO: why does this case even happen? Instances aren't plural. return repl - elif isinstance(repl, ParamSpecType): - return repl.copy_modified( - flavor=t.flavor, - prefix=t.prefix.copy_modified( - arg_types=t.prefix.arg_types + repl.prefix.arg_types, - arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, - arg_names=t.prefix.arg_names + repl.prefix.arg_names, - ), - ) - elif isinstance(repl, Parameters) or isinstance(repl, CallableType): - # if the paramspec is *P.args or **P.kwargs: - if t.flavor != ParamSpecFlavor.BARE: - assert isinstance(repl, CallableType), "Should not be able to get here." - # Is this always the right thing to do? 
- param_spec = repl.param_spec() - if param_spec: - return param_spec.with_flavor(t.flavor) - else: - return repl - else: - return Parameters( - t.prefix.arg_types + repl.arg_types, - t.prefix.arg_kinds + repl.arg_kinds, - t.prefix.arg_names + repl.arg_names, - variables=[*t.prefix.variables, *repl.variables], - ) + elif isinstance(repl, (ParamSpecType, Parameters, CallableType)): + return expand_param_spec(t, repl) else: # TODO: should this branch be removed? better not to fail silently return repl @@ -446,8 +422,8 @@ def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(item) def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Target of the type alias cannot contain type variables, - # so we just expand the arguments. + # Target of the type alias cannot contain type variables (not bound by the type + # alias itself), so we just expand the arguments. return t.copy_modified(args=self.expand_types(t.args)) def expand_types(self, types: Iterable[Type]) -> list[Type]: diff --git a/mypy/fixup.py b/mypy/fixup.py index b3a2d43d6b4d..3593e4faa184 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -180,6 +180,8 @@ def visit_var(self, v: Var) -> None: def visit_type_alias(self, a: TypeAlias) -> None: a.target.accept(self.type_fixer) + for v in a.alias_tvars: + v.accept(self.type_fixer) class TypeFixer(TypeVisitor[None]): diff --git a/mypy/mixedtraverser.py b/mypy/mixedtraverser.py index d25e9b9b0137..771f87fc6bd6 100644 --- a/mypy/mixedtraverser.py +++ b/mypy/mixedtraverser.py @@ -25,6 +25,9 @@ class MixedTraverserVisitor(TraverserVisitor, TypeTraverserVisitor): """Recursive traversal of both Node and Type objects.""" + def __init__(self) -> None: + self.in_type_alias_expr = False + # Symbol nodes def visit_var(self, var: Var) -> None: @@ -45,7 +48,9 @@ def visit_class_def(self, o: ClassDef) -> None: def visit_type_alias_expr(self, o: TypeAliasExpr) -> None: super().visit_type_alias_expr(o) + self.in_type_alias_expr = True o.type.accept(self) + self.in_type_alias_expr = False def visit_type_var_expr(self, o: TypeVarExpr) -> None: super().visit_type_var_expr(o) diff --git a/mypy/nodes.py b/mypy/nodes.py index ebf2f5cb271a..f0fc13dad780 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -12,6 +12,7 @@ Callable, Dict, Iterator, + List, Optional, Sequence, Tuple, @@ -2546,7 +2547,7 @@ class TypeAliasExpr(Expression): # The target type. type: mypy.types.Type - # Names of unbound type variables used to define the alias + # Names of type variables used to define the alias tvars: list[str] # Whether this alias was defined in bare form. Used to distinguish # between @@ -2559,7 +2560,7 @@ class TypeAliasExpr(Expression): def __init__(self, node: TypeAlias) -> None: super().__init__() self.type = node.target - self.tvars = node.alias_tvars + self.tvars = [v.name for v in node.alias_tvars] self.no_args = node.no_args self.node = node @@ -3309,10 +3310,9 @@ class TypeAlias(SymbolNode): class-valued attributes. See SemanticAnalyzerPass2.check_and_set_up_type_alias for details. - Aliases can be generic. Currently, mypy uses unbound type variables for - generic aliases and identifies them by name. Essentially, type aliases - work as macros that expand textually. The definition and expansion rules are - following: + Aliases can be generic. We use bound type variables for generic aliases, similar + to classes. Essentially, type aliases work as macros that expand textually. + The definition and expansion rules are following: 1. 
An alias targeting a generic class without explicit variables act as the given class (this doesn't apply to TypedDict, Tuple and Callable, which @@ -3363,11 +3363,11 @@ def f(x: B[T]) -> T: ... # without T, Any would be used here Meaning of other fields: - target: The target type. For generic aliases contains unbound type variables - as nested types. + target: The target type. For generic aliases contains bound type variables + as nested types (currently TypeVar and ParamSpec are supported). _fullname: Qualified name of this type alias. This is used in particular to track fine grained dependencies from aliases. - alias_tvars: Names of unbound type variables used to define this alias. + alias_tvars: Type variables used to define this alias. normalized: Used to distinguish between `A = List`, and `A = list`. Both are internally stored using `builtins.list` (because `typing.List` is itself an alias), while the second cannot be subscripted because of @@ -3396,7 +3396,7 @@ def __init__( line: int, column: int, *, - alias_tvars: list[str] | None = None, + alias_tvars: list[mypy.types.TypeVarLikeType] | None = None, no_args: bool = False, normalized: bool = False, eager: bool = False, @@ -3446,12 +3446,16 @@ def name(self) -> str: def fullname(self) -> str: return self._fullname + @property + def has_param_spec_type(self) -> bool: + return any(isinstance(v, mypy.types.ParamSpecType) for v in self.alias_tvars) + def serialize(self) -> JsonDict: data: JsonDict = { ".class": "TypeAlias", "fullname": self._fullname, "target": self.target.serialize(), - "alias_tvars": self.alias_tvars, + "alias_tvars": [v.serialize() for v in self.alias_tvars], "no_args": self.no_args, "normalized": self.normalized, "line": self.line, @@ -3466,7 +3470,8 @@ def accept(self, visitor: NodeVisitor[T]) -> T: def deserialize(cls, data: JsonDict) -> TypeAlias: assert data[".class"] == "TypeAlias" fullname = data["fullname"] - alias_tvars = data["alias_tvars"] + alias_tvars = [mypy.types.deserialize_type(v) for v in data["alias_tvars"]] + assert all(isinstance(t, mypy.types.TypeVarLikeType) for t in alias_tvars) target = mypy.types.deserialize_type(data["target"]) no_args = data["no_args"] normalized = data["normalized"] @@ -3477,7 +3482,7 @@ def deserialize(cls, data: JsonDict) -> TypeAlias: fullname, line, column, - alias_tvars=alias_tvars, + alias_tvars=cast(List[mypy.types.TypeVarLikeType], alias_tvars), no_args=no_args, normalized=normalized, ) diff --git a/mypy/semanal.py b/mypy/semanal.py index a5ddcc70eed6..74ab1c1c6f30 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -50,7 +50,7 @@ from __future__ import annotations -from contextlib import contextmanager +from contextlib import contextmanager, nullcontext from typing import Any, Callable, Collection, Iterable, Iterator, List, TypeVar, cast from typing_extensions import Final, TypeAlias as _TypeAlias @@ -459,6 +459,11 @@ def __init__( # rvalues while temporarily setting this to True. self.basic_type_applications = False + # Used to temporarily enable unbound type variables in some contexts. Namely, + # in base class expressions, and in right hand sides of type aliases. Do not add + # new uses of this, as this may cause leaking `UnboundType`s to type checking. 
+ self.allow_unbound_tvars = False + # mypyc doesn't properly handle implementing an abstractproperty # with a regular attribute so we make them properties @property @@ -477,6 +482,15 @@ def is_typeshed_stub_file(self) -> bool: def final_iteration(self) -> bool: return self._final_iteration + @contextmanager + def allow_unbound_tvars_set(self) -> Iterator[None]: + old = self.allow_unbound_tvars + self.allow_unbound_tvars = True + try: + yield + finally: + self.allow_unbound_tvars = old + # # Preparing module (performed before semantic analysis) # @@ -1599,7 +1613,7 @@ def setup_type_vars(self, defn: ClassDef, tvar_defs: list[TypeVarLikeType]) -> N def setup_alias_type_vars(self, defn: ClassDef) -> None: assert defn.info.special_alias is not None - defn.info.special_alias.alias_tvars = list(defn.info.type_vars) + defn.info.special_alias.alias_tvars = list(defn.type_vars) target = defn.info.special_alias.target assert isinstance(target, ProperType) if isinstance(target, TypedDictType): @@ -2631,7 +2645,10 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: # when analysing any type applications there) thus preventing the further analysis. # To break the tie, we first analyse rvalue partially, if it can be a type alias. with self.basic_type_applications_set(s): - s.rvalue.accept(self) + with self.allow_unbound_tvars_set() if self.can_possibly_be_index_alias( + s + ) else nullcontext(): + s.rvalue.accept(self) if self.found_incomplete_ref(tag) or self.should_wait_rhs(s.rvalue): # Initializer couldn't be fully analyzed. Defer the current node and give up. # Make sure that if we skip the definition of some local names, they can't be @@ -2642,7 +2659,8 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: if self.can_possibly_be_index_alias(s): # Now re-visit those rvalues that were we skipped type applications above. # This should be safe as generally semantic analyzer is idempotent. - s.rvalue.accept(self) + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) # The r.h.s. is now ready to be classified, first check if it is a special form: special_form = False @@ -3272,42 +3290,56 @@ def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Typ return None def analyze_alias( - self, rvalue: Expression, allow_placeholder: bool = False - ) -> tuple[Type | None, list[str], set[str], list[str]]: + self, name: str, rvalue: Expression, allow_placeholder: bool = False + ) -> tuple[Type | None, list[TypeVarLikeType], set[str], list[str]]: """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable). If yes, return the corresponding type, a list of qualified type variable names for generic aliases, a set of names the alias depends on, and a list of type variables if the alias is generic. 
- An schematic example for the dependencies: + A schematic example for the dependencies: A = int B = str analyze_alias(Dict[A, B])[2] == {'__main__.A', '__main__.B'} """ dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic()) global_scope = not self.type and not self.function_stack - res = analyze_type_alias( - rvalue, - self, - self.tvar_scope, - self.plugin, - self.options, - self.is_typeshed_stub_file, - allow_placeholder=allow_placeholder, - in_dynamic_func=dynamic, - global_scope=global_scope, - ) - typ: Type | None = None + try: + typ = expr_to_unanalyzed_type(rvalue, self.options, self.is_stub_file) + except TypeTranslationError: + self.fail( + "Invalid type alias: expression is not a valid type", rvalue, code=codes.VALID_TYPE + ) + return None, [], set(), [] + + found_type_vars = typ.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + tvar_defs: list[TypeVarLikeType] = [] + namespace = self.qualified_name(name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + for name, tvar_expr in found_type_vars: + tvar_def = self.tvar_scope.bind_new(name, tvar_expr) + tvar_defs.append(tvar_def) + + res = analyze_type_alias( + typ, + self, + self.tvar_scope, + self.plugin, + self.options, + self.is_typeshed_stub_file, + allow_placeholder=allow_placeholder, + in_dynamic_func=dynamic, + global_scope=global_scope, + allowed_alias_tvars=tvar_defs, + ) + analyzed: Type | None = None if res: - typ, depends_on = res - found_type_vars = typ.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) - alias_tvars = [name for (name, node) in found_type_vars] + analyzed, depends_on = res qualified_tvars = [node.fullname for (name, node) in found_type_vars] else: - alias_tvars = [] depends_on = set() qualified_tvars = [] - return typ, alias_tvars, depends_on, qualified_tvars + return analyzed, tvar_defs, depends_on, qualified_tvars def is_pep_613(self, s: AssignmentStmt) -> bool: if s.unanalyzed_type is not None and isinstance(s.unanalyzed_type, UnboundType): @@ -3387,13 +3419,13 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: res: Type | None = None if self.is_none_alias(rvalue): res = NoneType() - alias_tvars: list[str] = [] + alias_tvars: list[TypeVarLikeType] = [] depends_on: set[str] = set() qualified_tvars: list[str] = [] else: tag = self.track_incomplete_refs() res, alias_tvars, depends_on, qualified_tvars = self.analyze_alias( - rvalue, allow_placeholder=True + lvalue.name, rvalue, allow_placeholder=True ) if not res: return False @@ -4978,12 +5010,12 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: except TypeTranslationError: self.fail("Type expected within [...]", expr) return None - # We always allow unbound type variables in IndexExpr, since we - # may be analysing a type alias definition rvalue. The error will be - # reported elsewhere if it is not the case. analyzed = self.anal_type( typearg, - allow_unbound_tvars=True, + # The type application may appear in base class expression, + # where type variables are not bound yet. Or when accepting + # r.h.s. of type alias before we figured out it is a type alias. + allow_unbound_tvars=self.allow_unbound_tvars, allow_placeholder=True, allow_param_spec_literals=has_param_spec, ) @@ -6187,7 +6219,7 @@ def analyze_type_expr(self, expr: Expression) -> None: # them semantically analyzed, however, if they need to treat it as an expression # and not a type. (Which is to say, mypyc needs to do this.) 
Do the analysis # in a fresh tvar scope in order to suppress any errors about using type variables. - with self.tvar_scope_frame(TypeVarLikeScope()): + with self.tvar_scope_frame(TypeVarLikeScope()), self.allow_unbound_tvars_set(): expr.accept(self) def type_analyzer( diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 72903423116f..b9965236c379 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -7,23 +7,27 @@ from __future__ import annotations +from typing import Sequence + from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode from mypy.errors import Errors from mypy.messages import format_type from mypy.mixedtraverser import MixedTraverserVisitor -from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile, TypeInfo +from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile from mypy.options import Options from mypy.scope import Scope from mypy.subtypes import is_same_type, is_subtype from mypy.types import ( AnyType, Instance, + Parameters, ParamSpecType, TupleType, Type, TypeAliasType, TypeOfAny, + TypeVarLikeType, TypeVarTupleType, TypeVarType, UnboundType, @@ -35,6 +39,7 @@ class TypeArgumentAnalyzer(MixedTraverserVisitor): def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None: + super().__init__() self.errors = errors self.options = options self.is_typeshed_file = is_typeshed_file @@ -77,7 +82,12 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None: # correct aliases. if t.alias and len(t.args) != len(t.alias.alias_tvars): t.args = [AnyType(TypeOfAny.from_error) for _ in t.alias.alias_tvars] - get_proper_type(t).accept(self) + assert t.alias is not None, f"Unfixed type alias {t.type_ref}" + is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t) + if not is_error: + # If there was already an error for the alias itself, there is no point in checking + # the expansion, most likely it will result in the same kind of error. + get_proper_type(t).accept(self) def visit_instance(self, t: Instance) -> None: # Type argument counts were checked in the main semantic analyzer pass. We assume @@ -85,36 +95,67 @@ def visit_instance(self, t: Instance) -> None: info = t.type if isinstance(info, FakeInfo): return # https://github.com/python/mypy/issues/11079 - for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars): + self.validate_args(info.name, t.args, info.defn.type_vars, t) + super().visit_instance(t) + + def validate_args( + self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context + ) -> bool: + is_error = False + for (i, arg), tvar in zip(enumerate(args), type_vars): if isinstance(tvar, TypeVarType): if isinstance(arg, ParamSpecType): # TODO: Better message - self.fail(f'Invalid location for ParamSpec "{arg.name}"', t) + is_error = True + self.fail(f'Invalid location for ParamSpec "{arg.name}"', ctx) + self.note( + "You can use ParamSpec as the first argument to Callable, e.g., " + "'Callable[{}, int]'".format(arg.name), + ctx, + ) continue if tvar.values: if isinstance(arg, TypeVarType): + if self.in_type_alias_expr: + # Type aliases are allowed to use unconstrained type variables + # error will be checked at substitution point. 
+ continue arg_values = arg.values if not arg_values: + is_error = True self.fail( - message_registry.INVALID_TYPEVAR_AS_TYPEARG.format( - arg.name, info.name - ), - t, + message_registry.INVALID_TYPEVAR_AS_TYPEARG.format(arg.name, name), + ctx, code=codes.TYPE_VAR, ) continue else: arg_values = [arg] - self.check_type_var_values(info, arg_values, tvar.name, tvar.values, i + 1, t) + if self.check_type_var_values(name, arg_values, tvar.name, tvar.values, ctx): + is_error = True if not is_subtype(arg, tvar.upper_bound): + if self.in_type_alias_expr and isinstance(arg, TypeVarType): + # Type aliases are allowed to use unconstrained type variables + # error will be checked at substitution point. + continue + is_error = True self.fail( message_registry.INVALID_TYPEVAR_ARG_BOUND.format( - format_type(arg), info.name, format_type(tvar.upper_bound) + format_type(arg), name, format_type(tvar.upper_bound) ), - t, + ctx, code=codes.TYPE_VAR, ) - super().visit_instance(t) + elif isinstance(tvar, ParamSpecType): + if not isinstance( + get_proper_type(arg), (ParamSpecType, Parameters, AnyType, UnboundType) + ): + self.fail( + "Can only replace ParamSpec with a parameter types list or" + f" another ParamSpec, got {format_type(arg)}", + ctx, + ) + return is_error def visit_unpack_type(self, typ: UnpackType) -> None: proper_type = get_proper_type(typ.type) @@ -132,28 +173,25 @@ def visit_unpack_type(self, typ: UnpackType) -> None: self.fail(message_registry.INVALID_UNPACK.format(proper_type), typ) def check_type_var_values( - self, - type: TypeInfo, - actuals: list[Type], - arg_name: str, - valids: list[Type], - arg_number: int, - context: Context, - ) -> None: + self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context + ) -> bool: + is_error = False for actual in get_proper_types(actuals): - # TODO: bind type variables in class bases/alias targets - # so we can safely check this, currently we miss some errors. + # We skip UnboundType here, since they may appear in defn.bases, + # the error will be caught when visiting info.bases, that have bound type + # variables. if not isinstance(actual, (AnyType, UnboundType)) and not any( is_same_type(actual, value) for value in valids ): + is_error = True if len(actuals) > 1 or not isinstance(actual, Instance): self.fail( - message_registry.INVALID_TYPEVAR_ARG_VALUE.format(type.name), + message_registry.INVALID_TYPEVAR_ARG_VALUE.format(name), context, code=codes.TYPE_VAR, ) else: - class_name = f'"{type.name}"' + class_name = f'"{name}"' actual_type_name = f'"{actual.type.name}"' self.fail( message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format( @@ -162,6 +200,10 @@ def check_type_var_values( context, code=codes.TYPE_VAR, ) + return is_error def fail(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: self.errors.report(context.line, context.column, msg, code=code) + + def note(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: + self.errors.report(context.line, context.column, msg, severity="note", code=code) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index fb45dcc0dfc4..cd3d02bc6bb8 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -189,7 +189,7 @@ def add_keys_and_types_from_base( valid_items = base_items.copy() # Always fix invalid bases to avoid crashes. 
- tvars = info.type_vars + tvars = info.defn.type_vars if len(base_args) != len(tvars): any_kind = TypeOfAny.from_omitted_generics if base_args: @@ -235,7 +235,7 @@ def analyze_base_args(self, base: IndexExpr, ctx: Context) -> list[Type] | None: return base_args def map_items_to_base( - self, valid_items: dict[str, Type], tvars: list[str], base_args: list[Type] + self, valid_items: dict[str, Type], tvars: list[TypeVarLikeType], base_args: list[Type] ) -> dict[str, Type]: """Map item types to how they would look in their base with type arguments applied. diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 41a79db480c9..97f811384d37 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -187,7 +187,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna elif isinstance(node, TypeAlias): result[name] = ( "TypeAlias", - node.alias_tvars, + snapshot_types(node.alias_tvars), node.normalized, node.no_args, snapshot_optional_type(node.target), diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index a14335acca7e..04422036b67b 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -331,6 +331,8 @@ def visit_var(self, node: Var) -> None: def visit_type_alias(self, node: TypeAlias) -> None: self.fixup_type(node.target) + for v in node.alias_tvars: + self.fixup_type(v) super().visit_type_alias(node) # Helpers diff --git a/mypy/subtypes.py b/mypy/subtypes.py index a4b045cfa00c..e4667c45fbc5 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -631,6 +631,8 @@ def visit_param_spec(self, left: ParamSpecType) -> bool: and right.flavor == left.flavor ): return True + if isinstance(right, Parameters) and are_trivial_parameters(right): + return True return self._is_subtype(left.upper_bound, self.right) def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool: @@ -1415,6 +1417,18 @@ def g(x: int) -> int: ... ) +def are_trivial_parameters(param: Parameters | NormalizedCallableType) -> bool: + param_star = param.var_arg() + param_star2 = param.kw_arg() + return ( + param.arg_kinds == [ARG_STAR, ARG_STAR2] + and param_star is not None + and isinstance(get_proper_type(param_star.typ), AnyType) + and param_star2 is not None + and isinstance(get_proper_type(param_star2.typ), AnyType) + ) + + def are_parameters_compatible( left: Parameters | NormalizedCallableType, right: Parameters | NormalizedCallableType, @@ -1435,13 +1449,7 @@ def are_parameters_compatible( right_star2 = right.kw_arg() # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]" - if ( - right.arg_kinds == [ARG_STAR, ARG_STAR2] - and right_star - and isinstance(get_proper_type(right_star.typ), AnyType) - and right_star2 - and isinstance(get_proper_type(right_star2.typ), AnyType) - ): + if are_trivial_parameters(right): return True # Match up corresponding arguments and check them for compatibility. 
In diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 18948ee7f6d6..ee0256e2057a 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -160,7 +160,8 @@ def test_type_alias_expand_all(self) -> None: def test_recursive_nested_in_non_recursive(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) - NA = self.fx.non_rec_alias(Instance(self.fx.gi, [UnboundType("T")]), ["T"], [A]) + T = TypeVarType("T", "T", -1, [], self.fx.o) + NA = self.fx.non_rec_alias(Instance(self.fx.gi, [T]), [T], [A]) assert not NA.is_recursive assert has_recursive_types(NA) diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index 93e5e4b0b5ca..bd8351171208 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -340,7 +340,10 @@ def def_alias_2(self, base: Instance) -> tuple[TypeAliasType, Type]: return A, target def non_rec_alias( - self, target: Type, alias_tvars: list[str] | None = None, args: list[Type] | None = None + self, + target: Type, + alias_tvars: list[TypeVarLikeType] | None = None, + args: list[Type] | None = None, ) -> TypeAliasType: AN = TypeAlias(target, "__main__.A", -1, -1, alias_tvars=alias_tvars) if args is None: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index f22fa30706c4..f34f6ef49f6c 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -10,7 +10,6 @@ from mypy import errorcodes as codes, message_registry, nodes from mypy.errorcodes import ErrorCode -from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type from mypy.messages import MessageBuilder, format_type_bare, quote_type_string, wrong_type_arg_count from mypy.nodes import ( ARG_NAMED, @@ -23,7 +22,6 @@ ArgKind, Context, Decorator, - Expression, MypyFile, ParamSpecExpr, PlaceholderNode, @@ -87,6 +85,7 @@ callable_with_ellipsis, flatten_nested_unions, get_proper_type, + has_type_vars, ) from mypy.typetraverser import TypeTraverserVisitor from mypy.typevars import fill_typevars @@ -122,7 +121,7 @@ def analyze_type_alias( - node: Expression, + type: Type, api: SemanticAnalyzerCoreInterface, tvar_scope: TypeVarLikeScope, plugin: Plugin, @@ -131,6 +130,7 @@ def analyze_type_alias( allow_placeholder: bool = False, in_dynamic_func: bool = False, global_scope: bool = True, + allowed_alias_tvars: list[TypeVarLikeType] | None = None, ) -> tuple[Type, set[str]] | None: """Analyze r.h.s. of a (potential) type alias definition. @@ -138,11 +138,6 @@ def analyze_type_alias( full names of type aliases it depends on (directly or indirectly). Return None otherwise. 'node' must have been semantically analyzed. 
""" - try: - type = expr_to_unanalyzed_type(node, options, api.is_stub_file) - except TypeTranslationError: - api.fail("Invalid type alias: expression is not a valid type", node, code=codes.VALID_TYPE) - return None analyzer = TypeAnalyser( api, tvar_scope, @@ -152,6 +147,7 @@ def analyze_type_alias( defining_alias=True, allow_placeholder=allow_placeholder, prohibit_self_type="type alias target", + allowed_alias_tvars=allowed_alias_tvars, ) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope @@ -201,6 +197,7 @@ def __init__( allow_param_spec_literals: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, + allowed_alias_tvars: list[TypeVarLikeType] | None = None, allow_type_any: bool = False, ) -> None: self.api = api @@ -219,8 +216,12 @@ def __init__( self.always_allow_new_syntax = self.api.is_stub_file or self.api.is_future_flag_set( "annotations" ) - # Should we accept unbound type variables (always OK in aliases)? - self.allow_unbound_tvars = allow_unbound_tvars or defining_alias + # Should we accept unbound type variables? This is currently used for class bases, + # and alias right hand sides (before they are analyzed as type aliases). + self.allow_unbound_tvars = allow_unbound_tvars + if allowed_alias_tvars is None: + allowed_alias_tvars = [] + self.allowed_alias_tvars = allowed_alias_tvars # If false, record incomplete ref if we generate PlaceholderType. self.allow_placeholder = allow_placeholder # Are we in a context where Required[] is allowed? @@ -263,7 +264,12 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.api.defer() else: self.api.record_incomplete_ref() - return PlaceholderType(node.fullname, self.anal_array(t.args), t.line) + # Always allow ParamSpec for placeholders, if they are actually not valid, + # they will be reported later, after we resolve placeholders. 
+ with self.set_allow_param_spec_literals(True): + return PlaceholderType( + node.fullname, self.anal_array(t.args, allow_param_spec=True), t.line + ) else: if self.api.final_iteration: self.cannot_resolve_type(t) @@ -290,6 +296,8 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) tvar_def = self.tvar_scope.get_binding(sym) if isinstance(sym.node, ParamSpecExpr): if tvar_def is None: + if self.allow_unbound_tvars: + return t self.fail(f'ParamSpec "{t.name}" is unbound', t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) assert isinstance(tvar_def, ParamSpecType) @@ -307,7 +315,12 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) line=t.line, column=t.column, ) - if isinstance(sym.node, TypeVarExpr) and tvar_def is not None and self.defining_alias: + if ( + isinstance(sym.node, TypeVarExpr) + and self.defining_alias + and not defining_literal + and (tvar_def is None or tvar_def not in self.allowed_alias_tvars) + ): self.fail( f'Can\'t use bound type variable "{t.name}" to define generic alias', t, @@ -332,7 +345,9 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) column=t.column, ) if isinstance(sym.node, TypeVarTupleExpr) and ( - tvar_def is not None and self.defining_alias + tvar_def is not None + and self.defining_alias + and tvar_def not in self.allowed_alias_tvars ): self.fail( f'Can\'t use bound type variable "{t.name}" to define generic alias', @@ -363,7 +378,11 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) return special if isinstance(node, TypeAlias): self.aliases_used.add(fullname) - an_args = self.anal_array(t.args) + with self.set_allow_param_spec_literals(node.has_param_spec_type): + an_args = self.anal_array(t.args, allow_param_spec=True) + if node.has_param_spec_type and len(node.alias_tvars) == 1: + an_args = self.pack_paramspec_args(an_args) + disallow_any = self.options.disallow_any_generics and not self.is_typeshed_stub res = expand_type_alias( node, @@ -406,6 +425,17 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) else: # sym is None return AnyType(TypeOfAny.special_form) + def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]: + # "Aesthetic" ParamSpec literals for single ParamSpec: C[int, str] -> C[[int, str]]. + # These do not support mypy_extensions VarArgs, etc. as they were already analyzed + # TODO: should these be re-analyzed to get rid of this inconsistency? + count = len(an_args) + if count > 0: + first_arg = get_proper_type(an_args[0]) + if not (count == 1 and isinstance(first_arg, (Parameters, ParamSpecType, AnyType))): + return [Parameters(an_args, [ARG_POS] * count, [None] * count)] + return list(an_args) + def cannot_resolve_type(self, t: UnboundType) -> None: # TODO: Move error message generation to messages.py. We'd first # need access to MessageBuilder here. 
Also move the similar @@ -422,6 +452,10 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type: # last argument has to be ParamSpec ps = self.anal_type(t.args[-1], allow_param_spec=True) if not isinstance(ps, ParamSpecType): + if isinstance(ps, UnboundType) and self.allow_unbound_tvars: + sym = self.lookup_qualified(ps.name, t) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + return ps self.api.fail( "The last parameter to Concatenate needs to be a ParamSpec", t, @@ -633,25 +667,8 @@ def analyze_type_with_type_info( instance = Instance( info, self.anal_array(args, allow_param_spec=True), ctx.line, ctx.column ) - - # "aesthetic" paramspec literals - # these do not support mypy_extensions VarArgs, etc. as they were already analyzed - # TODO: should these be re-analyzed to get rid of this inconsistency? - # another inconsistency is with empty type args (Z[] is more possibly an error imo) - if len(info.type_vars) == 1 and info.has_param_spec_type and len(instance.args) > 0: - first_arg = get_proper_type(instance.args[0]) - - # TODO: can I use tuple syntax to isinstance multiple in 3.6? - if not ( - len(instance.args) == 1 - and ( - isinstance(first_arg, Parameters) - or isinstance(first_arg, ParamSpecType) - or isinstance(first_arg, AnyType) - ) - ): - args = instance.args - instance.args = (Parameters(args, [ARG_POS] * len(args), [None] * len(args)),) + if len(info.type_vars) == 1 and info.has_param_spec_type: + instance.args = tuple(self.pack_paramspec_args(instance.args)) if info.has_type_var_tuple_type: # - 1 to allow for the empty type var tuple case. @@ -676,6 +693,7 @@ def analyze_type_with_type_info( if info.special_alias: return expand_type_alias( info.special_alias, + # TODO: should we allow NamedTuples generic in ParamSpec? self.anal_array(args), self.fail, False, @@ -690,6 +708,7 @@ def analyze_type_with_type_info( if info.special_alias: return expand_type_alias( info.special_alias, + # TODO: should we allow TypedDicts generic in ParamSpec? self.anal_array(args), self.fail, False, @@ -810,9 +829,11 @@ def analyze_unbound_type_without_type_info( ) else: message = 'Cannot interpret reference "{}" as a type' - self.fail(message.format(name), t, code=codes.VALID_TYPE) - for note in notes: - self.note(note, t, code=codes.VALID_TYPE) + if not defining_literal: + # Literal check already gives a custom error. Avoid duplicating errors. + self.fail(message.format(name), t, code=codes.VALID_TYPE) + for note in notes: + self.note(note, t, code=codes.VALID_TYPE) # TODO: Would it be better to always return Any instead of UnboundType # in case of an error? On one hand, UnboundType has a name so error messages @@ -1102,6 +1123,16 @@ def analyze_callable_args_for_paramspec( return None tvar_def = self.tvar_scope.get_binding(sym) if not isinstance(tvar_def, ParamSpecType): + if ( + tvar_def is None + and self.allow_unbound_tvars + and isinstance(sym.node, ParamSpecExpr) + ): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. 
+ return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) return None return CallableType( @@ -1137,6 +1168,14 @@ def analyze_callable_args_for_concatenate( tvar_def = self.anal_type(callable_args, allow_param_spec=True) if not isinstance(tvar_def, ParamSpecType): + if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType): + sym = self.lookup_qualified(tvar_def.name, callable_args) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. + return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) return None # ick, CallableType should take ParamSpecType @@ -1637,12 +1676,12 @@ def expand_type_alias( """Expand a (generic) type alias target following the rules outlined in TypeAlias docstring. Here: - target: original target type (contains unbound type variables) - alias_tvars: type variable names + target: original target type args: types to be substituted in place of type variables fail: error reporter callback no_args: whether original definition used a bare generic `A = List` ctx: context where expansion happens + unexpanded_type, disallow_any, use_standard_error: used to customize error messages """ exp_len = len(node.alias_tvars) act_len = len(args) @@ -1682,6 +1721,9 @@ def expand_type_alias( msg = f"Bad number of arguments for type alias, expected: {exp_len}, given: {act_len}" fail(msg, ctx, code=codes.TYPE_ARG) return set_any_tvars(node, ctx.line, ctx.column, from_error=True) + # TODO: we need to check args validity w.r.t alias.alias_tvars. + # Otherwise invalid instantiations will be allowed in runtime context. + # Note: in type context, these will be still caught by semanal_typeargs. typ = TypeAliasType(node, args, ctx.line, ctx.column) assert typ.alias is not None # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here. @@ -1822,26 +1864,11 @@ def __init__( self.scope = scope self.diverging = False - def is_alias_tvar(self, t: Type) -> bool: - # Generic type aliases use unbound type variables. - if not isinstance(t, UnboundType) or t.args: - return False - node = self.lookup(t.name, t) - if ( - node - and isinstance(node.node, TypeVarLikeExpr) - and self.scope.get_binding(node) is None - ): - return True - return False - def visit_type_alias_type(self, t: TypeAliasType) -> Type: assert t.alias is not None, f"Unfixed type alias {t.type_ref}" if t.alias in self.seen_nodes: for arg in t.args: - if not self.is_alias_tvar(arg) and bool( - arg.accept(TypeVarLikeQuery(self.lookup, self.scope)) - ): + if not isinstance(arg, TypeVarLikeType) and has_type_vars(arg): self.diverging = True return t # All clear for this expansion chain. 
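In user code, the practical effect of this change is that type aliases can now be generic over a ParamSpec, mirroring generic classes and functions. A minimal illustration (hypothetical user code, not part of this patch; assumes ParamSpec is available from typing_extensions):

    from __future__ import annotations

    from typing import Callable, TypeVar

    from typing_extensions import ParamSpec

    P = ParamSpec("P")
    R = TypeVar("R")

    # A type alias generic over both a ParamSpec and an ordinary TypeVar.
    Transform = Callable[P, R]

    def apply(f: Transform[P, R], *args: P.args, **kwargs: P.kwargs) -> R:
        # P and R are bound by the enclosing function, just as with Callable[P, R].
        return f(*args, **kwargs)

    def add(x: int, y: int) -> int:
        return x + y

    n = apply(add, 1, 2)        # should be inferred as "builtins.int"
    bad = apply(add, 1, "two")  # should be an error: "str" is not compatible with "int"

For an alias whose only type variable is a ParamSpec, the "aesthetic" form A[int, str] is packed into A[[int, str]] by pack_paramspec_args() above, matching the existing behaviour for classes.
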
diff --git a/mypy/types.py b/mypy/types.py index 78142d9003d9..7d2ac9911bef 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3202,24 +3202,45 @@ def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> TypeGuard[In class InstantiateAliasVisitor(TrivialSyntheticTypeTranslator): - def __init__(self, vars: list[str], subs: list[Type]) -> None: - self.replacements = {v: s for (v, s) in zip(vars, subs)} + def __init__(self, vars: list[TypeVarLikeType], subs: list[Type]) -> None: + self.replacements = {v.id: s for (v, s) in zip(vars, subs)} def visit_type_alias_type(self, typ: TypeAliasType) -> Type: return typ.copy_modified(args=[t.accept(self) for t in typ.args]) - def visit_unbound_type(self, typ: UnboundType) -> Type: - # TODO: stop using unbound type variables for type aliases. - # Now that type aliases are very similar to TypeInfos we should - # make type variable tracking similar as well. Maybe we can even support - # upper bounds etc. for generic type aliases. - if typ.name in self.replacements: - return self.replacements[typ.name] + def visit_type_var(self, typ: TypeVarType) -> Type: + if typ.id in self.replacements: + return self.replacements[typ.id] return typ - def visit_type_var(self, typ: TypeVarType) -> Type: - if typ.name in self.replacements: - return self.replacements[typ.name] + def visit_callable_type(self, t: CallableType) -> Type: + param_spec = t.param_spec() + if param_spec is not None: + # TODO: this branch duplicates the one in expand_type(), find a way to reuse it + # without import cycle types <-> typeanal <-> expandtype. + repl = get_proper_type(self.replacements.get(param_spec.id)) + if isinstance(repl, CallableType) or isinstance(repl, Parameters): + prefix = param_spec.prefix + t = t.expand_param_spec(repl, no_prefix=True) + return t.copy_modified( + arg_types=[t.accept(self) for t in prefix.arg_types] + t.arg_types, + arg_kinds=prefix.arg_kinds + t.arg_kinds, + arg_names=prefix.arg_names + t.arg_names, + ret_type=t.ret_type.accept(self), + type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), + ) + return super().visit_callable_type(t) + + def visit_param_spec(self, typ: ParamSpecType) -> Type: + if typ.id in self.replacements: + repl = get_proper_type(self.replacements[typ.id]) + # TODO: all the TODOs from same logic in expand_type() apply here. + if isinstance(repl, Instance): + return repl + elif isinstance(repl, (ParamSpecType, Parameters, CallableType)): + return expand_param_spec(typ, repl) + else: + return repl return typ @@ -3236,7 +3257,7 @@ def visit_instance(self, typ: Instance) -> None: def replace_alias_tvars( - tp: Type, vars: list[str], subs: list[Type], newline: int, newcolumn: int + tp: Type, vars: list[TypeVarLikeType], subs: list[Type], newline: int, newcolumn: int ) -> Type: """Replace type variables in a generic type alias tp with substitutions subs resetting context. Length of subs should be already checked. @@ -3252,6 +3273,7 @@ def replace_alias_tvars( class HasTypeVars(TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) + self.skip_alias_target = True def visit_type_var(self, t: TypeVarType) -> bool: return True @@ -3406,6 +3428,41 @@ def callable_with_ellipsis(any_type: AnyType, ret_type: Type, fallback: Instance ) +def expand_param_spec( + t: ParamSpecType, repl: ParamSpecType | Parameters | CallableType +) -> ProperType: + """This is shared part of the logic w.r.t. ParamSpec instantiation. 
+ + It is shared between type aliases and proper types, that currently use somewhat different + logic for instantiation.""" + if isinstance(repl, ParamSpecType): + return repl.copy_modified( + flavor=t.flavor, + prefix=t.prefix.copy_modified( + arg_types=t.prefix.arg_types + repl.prefix.arg_types, + arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, + arg_names=t.prefix.arg_names + repl.prefix.arg_names, + ), + ) + else: + # if the paramspec is *P.args or **P.kwargs: + if t.flavor != ParamSpecFlavor.BARE: + assert isinstance(repl, CallableType), "Should not be able to get here." + # Is this always the right thing to do? + param_spec = repl.param_spec() + if param_spec: + return param_spec.with_flavor(t.flavor) + else: + return repl + else: + return Parameters( + t.prefix.arg_types + repl.arg_types, + t.prefix.arg_kinds + repl.arg_kinds, + t.prefix.arg_names + repl.arg_names, + variables=[*t.prefix.variables, *repl.variables], + ) + + def store_argument_type( defn: FuncItem, i: int, typ: CallableType, named_type: Callable[[str, list[Type]], Instance] ) -> None: diff --git a/mypy/typetraverser.py b/mypy/typetraverser.py index afe77efff78d..9c4a9157ad6a 100644 --- a/mypy/typetraverser.py +++ b/mypy/typetraverser.py @@ -131,6 +131,9 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> None: pass def visit_type_alias_type(self, t: TypeAliasType) -> None: + # TODO: sometimes we want to traverse target as well + # We need to find a way to indicate explicitly the intent, + # maybe make this method abstract (like for TypeTranslator)? self.traverse_types(t.args) def visit_unpack_type(self, t: UnpackType) -> None: diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 04108dded723..dd7e31528a4f 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -655,7 +655,7 @@ a: other.Array[float] reveal_type(a) # N: Revealed type is "other.array[Any, other.dtype[builtins.float]]" [out] -main:3: error: Type argument "float" of "dtype" must be a subtype of "generic" [type-var] +main:3: error: Type argument "float" of "Array" must be a subtype of "generic" [type-var] a: other.Array[float] ^ [file other.py] @@ -1031,8 +1031,9 @@ IntNode[int](1, 1) IntNode[int](1, 'a') # E: Argument 2 to "Node" has incompatible type "str"; expected "int" SameNode = Node[T, T] -# TODO: fix https://github.com/python/mypy/issues/7084. -ff = SameNode[T](1, 1) +ff = SameNode[T](1, 1) # E: Type variable "__main__.T" is unbound \ + # N: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) \ + # N: (Hint: Use "T" in function signature to bind "T" inside a function) a = SameNode(1, 'x') reveal_type(a) # N: Revealed type is "__main__.Node[Any, Any]" b = SameNode[int](1, 1) @@ -1101,13 +1102,12 @@ BadA = A[str, T] # One error here SameA = A[T, T] x = None # type: SameA[int] -y = None # type: SameA[str] # Two errors here, for both args of A +y = None # type: SameA[str] # Another error here [builtins fixtures/list.pyi] [out] main:9:8: error: Value of type variable "T" of "A" cannot be "str" -main:13:1: error: Value of type variable "T" of "A" cannot be "str" -main:13:1: error: Value of type variable "S" of "A" cannot be "str" +main:13:1: error: Value of type variable "T" of "SameA" cannot be "str" [case testGenericTypeAliasesIgnoredPotentialAlias] class A: ... 
@@ -2645,3 +2645,21 @@ class C(Generic[T]): def foo(x: C[T]) -> T: return x.x(42).y # OK + +[case testNestedGenericFunctionTypeApplication] +from typing import TypeVar, Generic, List + +A = TypeVar("A") +B = TypeVar("B") + +class C(Generic[A]): + x: A + +def foo(x: A) -> A: + def bar() -> List[A]: + y = C[List[A]]() + z = C[List[B]]() # E: Type variable "__main__.B" is unbound \ + # N: (Hint: Use "Generic[B]" or "Protocol[B]" base class to bind "B" inside a class) \ + # N: (Hint: Use "B" in function signature to bind "B" inside a function) + return y.x + return bar()[0] diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 6eddcd866cab..0722ee8d91e5 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -1750,11 +1750,8 @@ def f(cls: Type[object]) -> None: [case testIsinstanceTypeArgs] from typing import Iterable, TypeVar x = 1 -T = TypeVar('T') - isinstance(x, Iterable) isinstance(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks -isinstance(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, (str, Iterable[int]))) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstancelist.pyi] @@ -1783,10 +1780,8 @@ isinstance(x, It2) # E: Parameterized generics cannot be used with class or ins [case testIssubclassTypeArgs] from typing import Iterable, TypeVar x = int -T = TypeVar('T') issubclass(x, Iterable) issubclass(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks -issubclass(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks issubclass(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index ef8c9095e58a..d523e5c08af8 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -2437,23 +2437,10 @@ b: Final = 3 c: Final[Literal[3]] = 3 d: Literal[3] -# TODO: Consider if we want to support cases 'b' and 'd' or not. -# Probably not: we want to mostly keep the 'types' and 'value' worlds distinct. -# However, according to final semantics, we ought to be able to substitute "b" with -# "3" wherever it's used and get the same behavior -- so maybe we do need to support -# at least case "b" for consistency? -a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.a" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.b" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.c" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] 
is invalid \ - # E: Variable "__main__.d" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid +b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid +c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] is invalid +d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] is invalid [builtins fixtures/tuple.pyi] [out] @@ -2517,9 +2504,7 @@ r: Literal[Color.RED] g: Literal[Color.GREEN] b: Literal[Color.BLUE] bad1: Literal[Color] # E: Parameter 1 of Literal[...] is invalid -bad2: Literal[Color.func] # E: Function "__main__.Color.func" is not valid as a type \ - # N: Perhaps you need "Callable[...]" or a callback protocol? \ - # E: Parameter 1 of Literal[...] is invalid +bad2: Literal[Color.func] # E: Parameter 1 of Literal[...] is invalid bad3: Literal[Color.func()] # E: Invalid type: Literal[...] cannot contain arbitrary expressions def expects_color(x: Color) -> None: pass diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index b13f74bc3729..4a5dd0c1b04e 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -429,7 +429,6 @@ class Z(Generic[P]): ... # literals can be applied n: Z[[int]] -# TODO: type aliases too nt1 = Z[[int]] nt2: TypeAlias = Z[[int]] @@ -506,8 +505,7 @@ def f2(x: X[int, Concatenate[int, P_2]]) -> str: ... # Accepted def f3(x: X[int, [int, bool]]) -> str: ... # Accepted # ellipsis only show up here, but I can assume it works like Callable[..., R] def f4(x: X[int, ...]) -> str: ... # Accepted -# TODO: this is not rejected: -# def f5(x: X[int, int]) -> str: ... # Rejected +def f5(x: X[int, int]) -> str: ... # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int" # CASE 3 def bar(x: int, *args: bool) -> int: ... @@ -844,9 +842,7 @@ class A: ... reveal_type(A.func) # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`-2, *_P.args, **_P.kwargs) -> _R`-2" - -# TODO: _R` keeps flip-flopping between 5 (?), 13, 14, 15. Spooky. -# reveal_type(A().func) $ N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`13, *_P.args, **_P.kwargs) -> _R`13" +reveal_type(A().func) # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`5, *_P.args, **_P.kwargs) -> _R`5" def f(x: int) -> int: ... @@ -879,8 +875,7 @@ class A: ... reveal_type(A.func) # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`-1, None]) -> __main__.Job[_P`-1, None]" -# TODO: flakey, _P`4 alternates around. -# reveal_type(A().func) $ N: Revealed type is "def [_P] (action: __main__.Job[_P`4, None]) -> __main__.Job[_P`4, None]" +reveal_type(A().func) # N: Revealed type is "def [_P] (action: __main__.Job[_P`3, None]) -> __main__.Job[_P`3, None]" reveal_type(A().func(Job(lambda x: x))) # N: Revealed type is "__main__.Job[[x: Any], None]" def f(x: int, y: int) -> None: ... @@ -1296,3 +1291,144 @@ class C(Generic[P]): reveal_type(bar(C(fn=foo, x=1))) # N: Revealed type is "__main__.C[[x: builtins.int]]" [builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasBasic] +from typing import ParamSpec, Callable + +P = ParamSpec("P") +C = Callable[P, int] +def f(n: C[P]) -> C[P]: ... + +@f +def bar(x: int) -> int: ... 
+@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "Callable[[int], int]" +def foo(x: int) -> str: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasConcatenate] +from typing import ParamSpec, Callable +from typing_extensions import Concatenate + +P = ParamSpec("P") +C = Callable[Concatenate[int, P], int] +def f(n: C[P]) -> C[P]: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[], int]"; expected "Callable[[int], int]" +def bad() -> int: ... + +@f +def bar(x: int) -> int: ... + +@f +def bar2(x: int, y: str) -> int: ... +reveal_type(bar2) # N: Revealed type is "def (builtins.int, y: builtins.str) -> builtins.int" + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "Callable[[int], int]" \ + # N: This is likely because "foo" has named arguments: "x". Consider marking them positional-only +def foo(x: int) -> str: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[str, int], int]"; expected "Callable[[int, int], int]" \ + # N: This is likely because "foo2" has named arguments: "x". Consider marking them positional-only +def foo2(x: str, y: int) -> int: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.int, builtins.str) -> builtins.int" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.int, builtins.str) -> builtins.int" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasRecursive] +from typing import ParamSpec, Callable, Union + +P = ParamSpec("P") +C = Callable[P, Union[int, C[P]]] +def f(n: C[P]) -> C[P]: ... + +@f +def bar(x: int) -> int: ... + +@f +def bar2(__x: int) -> Callable[[int], int]: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "C[[int]]" +def foo(x: int) -> str: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], Callable[[int], str]]"; expected "C[[int]]" +def foo2(__x: int) -> Callable[[int], str]: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.str) -> Union[builtins.int, ...]" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str) -> Union[builtins.int, ...]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasInRuntimeContext] +from typing import ParamSpec, Generic + +P = ParamSpec("P") +class C(Generic[P]): ... + +c = C[int, str]() +reveal_type(c) # N: Revealed type is "__main__.C[[builtins.int, builtins.str]]" + +A = C[P] +a = A[int, str]() +reveal_type(a) # N: Revealed type is "__main__.C[[builtins.int, builtins.str]]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasInvalidLocations] +from typing import ParamSpec, Generic, List, TypeVar, Callable + +P = ParamSpec("P") +T = TypeVar("T") +A = List[T] +def f(x: A[[int, str]]) -> None: ... # E: Bracketed expression "[...]" is not valid as a type \ + # N: Did you mean "List[...]"? +def g(x: A[P]) -> None: ... 
# E: Invalid location for ParamSpec "P" \ + # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + +C = Callable[P, T] +x: C[int] # E: Bad number of arguments for type alias, expected: 2, given: 1 +y: C[int, str] # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int" +z: C[int, str, bytes] # E: Bad number of arguments for type alias, expected: 2, given: 3 +[builtins fixtures/paramspec.pyi] + +[case testTrivialParametersHandledCorrectly] +from typing import ParamSpec, Generic, TypeVar, Callable, Any +from typing_extensions import Concatenate + +P = ParamSpec("P") +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[S, P, T]): ... + +def foo(f: Callable[P, int]) -> None: + x: C[Any, ..., Any] + x1: C[int, Concatenate[int, str, P], str] + x = x1 # OK +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasNested] +from typing import ParamSpec, Callable, List, TypeVar, Generic +from typing_extensions import Concatenate + +P = ParamSpec("P") +A = List[Callable[P, None]] +B = List[Callable[Concatenate[int, P], None]] + +fs: A[int, str] +reveal_type(fs) # N: Revealed type is "builtins.list[def (builtins.int, builtins.str)]" +gs: B[int, str] +reveal_type(gs) # N: Revealed type is "builtins.list[def (builtins.int, builtins.int, builtins.str)]" + +T = TypeVar("T") +class C(Generic[T]): ... +C[Callable[P, int]]() # E: The first argument to Callable must be a list of types, parameter specification, or "..." \ + # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas +[builtins fixtures/paramspec.pyi] diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index fab372976ab2..121be34f0339 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -947,3 +947,38 @@ c.SpecialImplicit = 4 c.SpecialExplicit = 4 [builtins fixtures/tuple.pyi] [typing fixtures/typing-medium.pyi] + +[case testValidTypeAliasValues] +from typing import TypeVar, Generic, List + +T = TypeVar("T", int, str) +S = TypeVar("S", int, bytes) + +class C(Generic[T]): ... +class D(C[S]): ... # E: Invalid type argument value for "C" + +U = TypeVar("U") +A = List[C[U]] +x: A[bytes] # E: Value of type variable "T" of "C" cannot be "bytes" + +V = TypeVar("V", bound=int) +class E(Generic[V]): ... +B = List[E[U]] +y: B[str] # E: Type argument "str" of "E" must be a subtype of "int" + +[case testValidTypeAliasValuesMoreRestrictive] +from typing import TypeVar, Generic, List + +T = TypeVar("T") +S = TypeVar("S", int, str) +U = TypeVar("U", bound=int) + +class C(Generic[T]): ... + +A = List[C[S]] +x: A[int] +x_bad: A[bytes] # E: Value of type variable "S" of "A" cannot be "bytes" + +B = List[C[U]] +y: B[int] +y_bad: B[str] # E: Type argument "str" of "B" must be a subtype of "int" From 4471c7e76f27ee51eb8d47a4803097ec15c62128 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 24 Nov 2022 16:52:22 +0000 Subject: [PATCH 076/292] Proposal: don't simplify unions in expand_type() (#14178) Fixes #6730 Currently `expand_type()` is inherently recursive, going through `expand_type` -> `make_simplified_union` -> `is_proper_subtype` -> `map_instance_to_supertype` -> `expand_type`. TBH I never liked this, so I propose that we don't do this. 
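For context, here is a minimal self-contained example of the kind of recursive definition that used to drive this cycle into a crash; it mirrors one of the new test cases added below, with quoted forward references only so that the snippet also runs as plain Python:

```python
from typing import List, Union

# Expanding the base class of this recursive definition used to go through
# make_simplified_union(), which re-entered expand_type() and crashed mypy.
class Tag(List[Union["Tag", List["Tag"]]]): ...

Tag()  # analyzing this file used to crash mypy (see testNoRecursiveExpandInstanceUnionCrash below)
```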
One one hand, this is a significant change in semantics, but on the other hand: * This fixes a crash (actually a whole class of crashes) that can happen even without recursive aliases * This removes an ugly import and simplifies an import cycle in mypy code * This makes mypy 2% faster (measured on self-check) To make transition smoother, I propose to make trivial simplifications, like removing `` (and `None` without strict optional), removing everything else if there is an `object` type, and remove strict duplicates. Notably, with these few things _all existing tests pass_ (and even without it, only half a dozen tests fail on `reveal_type()`). --- mypy/expandtype.py | 14 ++++++---- mypy/types.py | 30 ++++++++++++++++++++ test-data/unit/check-recursive-types.test | 34 +++++++++++++++++++++++ test-data/unit/pythoneval.test | 16 +++++++++++ 4 files changed, 89 insertions(+), 5 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index d3286480e316..96d556121fd4 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -34,7 +34,9 @@ UnionType, UnpackType, expand_param_spec, + flatten_nested_unions, get_proper_type, + remove_trivial, ) from mypy.typevartuples import ( find_unpack_in_list, @@ -405,11 +407,13 @@ def visit_literal_type(self, t: LiteralType) -> Type: return t def visit_union_type(self, t: UnionType) -> Type: - # After substituting for type variables in t.items, - # some of the resulting types might be subtypes of others. - from mypy.typeops import make_simplified_union # asdf - - return make_simplified_union(self.expand_types(t.items), t.line, t.column) + expanded = self.expand_types(t.items) + # After substituting for type variables in t.items, some resulting types + # might be subtypes of others, however calling make_simplified_union() + # can cause recursion, so we just remove strict duplicates. + return UnionType.make_union( + remove_trivial(flatten_nested_unions(expanded)), t.line, t.column + ) def visit_partial_type(self, t: PartialType) -> Type: return t diff --git a/mypy/types.py b/mypy/types.py index 7d2ac9911bef..326727310a1b 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3487,3 +3487,33 @@ def store_argument_type( if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: arg_type = named_type("builtins.dict", [named_type("builtins.str", []), arg_type]) defn.arguments[i].variable.type = arg_type + + +def remove_trivial(types: Iterable[Type]) -> list[Type]: + """Make trivial simplifications on a list of types without calling is_subtype(). + + This makes following simplifications: + * Remove bottom types (taking into account strict optional setting) + * Remove everything else if there is an `object` + * Remove strict duplicate types + """ + removed_none = False + new_types = [] + all_types = set() + for t in types: + p_t = get_proper_type(t) + if isinstance(p_t, UninhabitedType): + continue + if isinstance(p_t, NoneType) and not state.strict_optional: + removed_none = True + continue + if isinstance(p_t, Instance) and p_t.type.fullname == "builtins.object": + return [p_t] + if p_t not in all_types: + new_types.append(t) + all_types.add(p_t) + if new_types: + return new_types + if removed_none: + return [NoneType()] + return [UninhabitedType()] diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index 95b0918866f1..0aa3c4c18be3 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -837,3 +837,37 @@ def foo(x: T) -> C: ... 
Nested = Union[C, Sequence[Nested]] x: Nested = foo(42) + +[case testNoRecursiveExpandInstanceUnionCrash] +from typing import List, Union + +class Tag(List[Union[Tag, List[Tag]]]): ... +Tag() + +[case testNoRecursiveExpandInstanceUnionCrashGeneric] +from typing import Generic, Iterable, TypeVar, Union + +ValueT = TypeVar("ValueT") +class Recursive(Iterable[Union[ValueT, Recursive[ValueT]]]): + pass + +class Base(Generic[ValueT]): + def __init__(self, element: ValueT): + pass +class Sub(Base[Union[ValueT, Recursive[ValueT]]]): + pass + +x: Iterable[str] +reveal_type(Sub) # N: Revealed type is "def [ValueT] (element: Union[ValueT`1, __main__.Recursive[ValueT`1]]) -> __main__.Sub[ValueT`1]" +reveal_type(Sub(x)) # N: Revealed type is "__main__.Sub[typing.Iterable[builtins.str]]" + +[case testNoRecursiveExpandInstanceUnionCrashInference] +from typing import TypeVar, Union, Generic, List + +T = TypeVar("T") +InList = Union[T, InListRecurse[T]] +class InListRecurse(Generic[T], List[InList[T]]): ... + +def list_thing(transforming: InList[T]) -> T: + ... +reveal_type(list_thing([5])) # N: Revealed type is "builtins.list[builtins.int]" diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 3f669246bb4e..d89a66d1c544 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1735,3 +1735,19 @@ _testEnumNameWorkCorrectlyOn311.py:12: note: Revealed type is "Union[Literal[1]? _testEnumNameWorkCorrectlyOn311.py:13: note: Revealed type is "Literal['X']?" _testEnumNameWorkCorrectlyOn311.py:14: note: Revealed type is "builtins.int" _testEnumNameWorkCorrectlyOn311.py:15: note: Revealed type is "builtins.int" + +[case testTypedDictUnionGetFull] +from typing import Dict +from typing_extensions import TypedDict + +class TD(TypedDict, total=False): + x: int + y: int + +A = Dict[str, TD] +x: A +def foo(k: str) -> TD: + reveal_type(x.get(k, {})) + return x.get(k, {}) +[out] +_testTypedDictUnionGetFull.py:11: note: Revealed type is "TypedDict('_testTypedDictUnionGetFull.TD', {'x'?: builtins.int, 'y'?: builtins.int})" From 7ea5ff6c1476d425b6434405add819f0e1abec67 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 25 Nov 2022 10:06:54 +0000 Subject: [PATCH 077/292] Fix issues with type aliases and new style unions (#14181) Fix aliases like this and other aliases involving new-style unions: ``` A = type[int] | str ``` Fixes #12392. Fixes #14158. --- mypy/checker.py | 21 +------- mypy/checkexpr.py | 3 ++ mypy/nodes.py | 23 +++++++-- mypy/semanal.py | 6 ++- mypy/server/aststrip.py | 5 ++ mypy/strconv.py | 2 + mypy/traverser.py | 2 + mypy/treetransform.py | 7 ++- test-data/unit/check-type-aliases.test | 11 ++++ test-data/unit/fine-grained.test | 31 ++++++++++++ test-data/unit/pythoneval.test | 69 +++++++++++++++++++++++++- 11 files changed, 152 insertions(+), 28 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index f9acc9766140..80f7e19c65f0 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2668,26 +2668,7 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: self.msg.annotation_in_unchecked_function(context=s) def check_type_alias_rvalue(self, s: AssignmentStmt) -> None: - if not (self.is_stub and isinstance(s.rvalue, OpExpr) and s.rvalue.op == "|"): - # We do this mostly for compatibility with old semantic analyzer. - # TODO: should we get rid of this? - alias_type = self.expr_checker.accept(s.rvalue) - else: - # Avoid type checking 'X | Y' in stubs, since there can be errors - # on older Python targets. 
- alias_type = AnyType(TypeOfAny.special_form) - - def accept_items(e: Expression) -> None: - if isinstance(e, OpExpr) and e.op == "|": - accept_items(e.left) - accept_items(e.right) - else: - # Nested union types have been converted to type context - # in semantic analysis (such as in 'list[int | str]'), - # so we don't need to deal with them here. - self.expr_checker.accept(e) - - accept_items(s.rvalue) + alias_type = self.expr_checker.accept(s.rvalue) self.store_type(s.lvalues[-1], alias_type) def check_assignment( diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 362ef1eeb7f8..ad0f42f1e32a 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2847,6 +2847,9 @@ def visit_ellipsis(self, e: EllipsisExpr) -> Type: def visit_op_expr(self, e: OpExpr) -> Type: """Type check a binary operator expression.""" + if e.analyzed: + # It's actually a type expression X | Y. + return self.accept(e.analyzed) if e.op == "and" or e.op == "or": return self.check_boolean_op(e, e) if e.op == "*" and isinstance(e.left, ListExpr): diff --git a/mypy/nodes.py b/mypy/nodes.py index f0fc13dad780..c02e21e88b44 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1969,10 +1969,20 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class OpExpr(Expression): - """Binary operation (other than . or [] or comparison operators, - which have specific nodes).""" + """Binary operation. - __slots__ = ("op", "left", "right", "method_type", "right_always", "right_unreachable") + The dot (.), [] and comparison operators have more specific nodes. + """ + + __slots__ = ( + "op", + "left", + "right", + "method_type", + "right_always", + "right_unreachable", + "analyzed", + ) __match_args__ = ("left", "op", "right") @@ -1985,8 +1995,12 @@ class OpExpr(Expression): right_always: bool # Per static analysis only: Is the right side unreachable? right_unreachable: bool + # Used for expressions that represent a type "X | Y" in some contexts + analyzed: TypeAliasExpr | None - def __init__(self, op: str, left: Expression, right: Expression) -> None: + def __init__( + self, op: str, left: Expression, right: Expression, analyzed: TypeAliasExpr | None = None + ) -> None: super().__init__() self.op = op self.left = left @@ -1994,6 +2008,7 @@ def __init__(self, op: str, left: Expression, right: Expression) -> None: self.method_type = None self.right_always = False self.right_unreachable = False + self.analyzed = analyzed def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_op_expr(self) diff --git a/mypy/semanal.py b/mypy/semanal.py index 74ab1c1c6f30..698959ca1bdf 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3472,7 +3472,11 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: no_args=no_args, eager=eager, ) - if isinstance(s.rvalue, (IndexExpr, CallExpr)): # CallExpr is for `void = type(None)` + if isinstance(s.rvalue, (IndexExpr, CallExpr, OpExpr)) and ( + not isinstance(rvalue, OpExpr) + or (self.options.python_version >= (3, 10) or self.is_stub_file) + ): + # Note: CallExpr is for "void = type(None)" and OpExpr is for "X | Y" union syntax. 
s.rvalue.analyzed = TypeAliasExpr(alias_node) s.rvalue.analyzed.line = s.line # we use the column from resulting target, to get better location for errors diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 87ce63e9d543..83d90f31e8c4 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -54,6 +54,7 @@ MypyFile, NameExpr, Node, + OpExpr, OverloadedFuncDef, RefExpr, StarExpr, @@ -222,6 +223,10 @@ def visit_index_expr(self, node: IndexExpr) -> None: node.analyzed = None # May have been an alias or type application. super().visit_index_expr(node) + def visit_op_expr(self, node: OpExpr) -> None: + node.analyzed = None # May have been an alias + super().visit_op_expr(node) + def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None diff --git a/mypy/strconv.py b/mypy/strconv.py index f1aa6819e2b7..861a7c9b7fa0 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -413,6 +413,8 @@ def visit_call_expr(self, o: mypy.nodes.CallExpr) -> str: return self.dump(a + extra, o) def visit_op_expr(self, o: mypy.nodes.OpExpr) -> str: + if o.analyzed: + return o.analyzed.accept(self) return self.dump([o.op, o.left, o.right], o) def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> str: diff --git a/mypy/traverser.py b/mypy/traverser.py index 3c4f21601b88..378d44c67f47 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -262,6 +262,8 @@ def visit_call_expr(self, o: CallExpr) -> None: def visit_op_expr(self, o: OpExpr) -> None: o.left.accept(self) o.right.accept(self) + if o.analyzed is not None: + o.analyzed.accept(self) def visit_comparison_expr(self, o: ComparisonExpr) -> None: for operand in o.operands: diff --git a/mypy/treetransform.py b/mypy/treetransform.py index 2f678b89b1e6..432baf7d73b7 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -519,7 +519,12 @@ def visit_call_expr(self, node: CallExpr) -> CallExpr: ) def visit_op_expr(self, node: OpExpr) -> OpExpr: - new = OpExpr(node.op, self.expr(node.left), self.expr(node.right)) + new = OpExpr( + node.op, + self.expr(node.left), + self.expr(node.right), + cast(Optional[TypeAliasExpr], self.optional_expr(node.analyzed)), + ) new.method_type = self.optional_type(node.method_type) return new diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 121be34f0339..e9b5e3e4d966 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -948,6 +948,17 @@ c.SpecialExplicit = 4 [builtins fixtures/tuple.pyi] [typing fixtures/typing-medium.pyi] +[case testNewStyleUnionInTypeAliasWithMalformedInstance] +# flags: --python-version 3.10 +from typing import List + +A = List[int, str] | int # E: "list" expects 1 type argument, but 2 given +B = int | list[int, str] # E: "list" expects 1 type argument, but 2 given +a: A +b: B +reveal_type(a) # N: Revealed type is "Union[builtins.list[Any], builtins.int]" +reveal_type(b) # N: Revealed type is "Union[builtins.int, builtins.list[Any]]" + [case testValidTypeAliasValues] from typing import TypeVar, Generic, List diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index c162f402486a..1a318b52a082 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -10277,3 +10277,34 @@ A = str m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" == m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" + +[case 
testTypeAliasWithNewStyleUnionChangedToVariable] +# flags: --python-version 3.10 +import a + +[file a.py] +from b import C, D +A = C | D +a: A +reveal_type(a) + +[file b.py] +C = int +D = str + +[file b.py.2] +C = "x" +D = "y" + +[file b.py.3] +C = str +D = int +[out] +a.py:4: note: Revealed type is "Union[builtins.int, builtins.str]" +== +a.py:2: error: Unsupported left operand type for | ("str") +a.py:3: error: Variable "a.A" is not valid as a type +a.py:3: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +a.py:4: note: Revealed type is "A?" +== +a.py:4: note: Revealed type is "Union[builtins.str, builtins.int]" diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index d89a66d1c544..acaaf5f21cf0 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1663,7 +1663,7 @@ _testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, No [case testTypeAliasWithNewStyleUnion] # flags: --python-version 3.10 -from typing import Literal, Type, TypeAlias +from typing import Literal, Type, TypeAlias, TypeVar Foo = Literal[1, 2] reveal_type(Foo) @@ -1682,15 +1682,44 @@ Opt4 = float | None A = Type[int] | str B: TypeAlias = Type[int] | str +C = type[int] | str + +D = type[int] | str +x: D +reveal_type(x) +E: TypeAlias = type[int] | str +y: E +reveal_type(y) +F = list[type[int] | str] + +T = TypeVar("T", int, str) +def foo(x: T) -> T: + A = type[int] | str + a: A + return x [out] _testTypeAliasWithNewStyleUnion.py:5: note: Revealed type is "typing._SpecialForm" +_testTypeAliasWithNewStyleUnion.py:25: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnion.py:28: note: Revealed type is "Union[Type[builtins.int], builtins.str]" [case testTypeAliasWithNewStyleUnionInStub] # flags: --python-version 3.7 import m +a: m.A +reveal_type(a) +b: m.B +reveal_type(b) +c: m.C +reveal_type(c) +d: m.D +reveal_type(d) +e: m.E +reveal_type(e) +f: m.F +reveal_type(f) [file m.pyi] -from typing import Type +from typing import Type, Callable from typing_extensions import Literal, TypeAlias Foo = Literal[1, 2] @@ -1710,8 +1739,27 @@ Opt4 = float | None A = Type[int] | str B: TypeAlias = Type[int] | str +C = type[int] | str +reveal_type(C) +D: TypeAlias = type[int] | str +E = str | type[int] +F: TypeAlias = str | type[int] +G = list[type[int] | str] +H = list[str | type[int]] + +CU1 = int | Callable[[], str | bool] +CU2: TypeAlias = int | Callable[[], str | bool] +CU3 = int | Callable[[str | bool], str] +CU4: TypeAlias = int | Callable[[str | bool], str] [out] m.pyi:5: note: Revealed type is "typing._SpecialForm" +m.pyi:22: note: Revealed type is "typing._SpecialForm" +_testTypeAliasWithNewStyleUnionInStub.py:4: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:6: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:8: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:10: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:12: note: Revealed type is "Union[builtins.str, Type[builtins.int]]" +_testTypeAliasWithNewStyleUnionInStub.py:14: note: Revealed type is "Union[builtins.str, Type[builtins.int]]" [case testEnumNameWorkCorrectlyOn311] # flags: --python-version 3.11 @@ -1736,6 +1784,23 @@ _testEnumNameWorkCorrectlyOn311.py:13: note: Revealed type is 
"Literal['X']?" _testEnumNameWorkCorrectlyOn311.py:14: note: Revealed type is "builtins.int" _testEnumNameWorkCorrectlyOn311.py:15: note: Revealed type is "builtins.int" +[case testTypeAliasNotSupportedWithNewStyleUnion] +# flags: --python-version 3.9 +from typing_extensions import TypeAlias +A = type[int] | str +B = str | type[int] +C = str | int +D: TypeAlias = str | int +[out] +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Unsupported left operand type for | ("Type[str]") +_testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Unsupported left operand type for | ("Type[str]") + [case testTypedDictUnionGetFull] from typing import Dict from typing_extensions import TypedDict From a9024a801b2fe11f1e67ea62d55b22f4dc913076 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 25 Nov 2022 12:58:26 +0000 Subject: [PATCH 078/292] Fix type aliases with fixed-length tuples (#14184) Fix type aliases like these: ``` T = tuple[int, str] ``` Type applications involving fixed-length tuples still don't fully work. The inferred type is a variable-length tuple when constructing a tuple using a type application, e.g. `tuple[int, str]((1, ""))`. This seems a pretty low-priority issue, whereas the type alias use case seems common. Most of the work was by @sobolevn originally in #12134. I just finished it up. Fixes #11098. --- mypy/checkexpr.py | 3 ++ test-data/unit/check-type-aliases.test | 32 ++++++++++++++++++++ test-data/unit/pythoneval.test | 42 ++++++++++++++++++++++++++ 3 files changed, 77 insertions(+) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index ad0f42f1e32a..eb585aba42df 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3969,6 +3969,9 @@ def apply_type_arguments_to_callable( if isinstance(tp, CallableType): if len(tp.variables) != len(args): + if tp.is_type_obj() and tp.type_object().fullname == "builtins.tuple": + # TODO: Specialize the callable for the type arguments + return tp self.msg.incompatible_type_application(len(tp.variables), len(args), ctx) return AnyType(TypeOfAny.from_error) return self.apply_generic_arguments(tp, args, ctx) diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index e9b5e3e4d966..e5d9bf94873a 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -993,3 +993,35 @@ x_bad: A[bytes] # E: Value of type variable "S" of "A" cannot be "bytes" B = List[C[U]] y: B[int] y_bad: B[str] # E: Type argument "str" of "B" must be a subtype of "int" + +[case testTupleWithDifferentArgsPy38] +# flags: --python-version 3.8 +NotYet1 = tuple[float] # E: "tuple" is not subscriptable +NotYet2 = tuple[float, float] # E: "tuple" is not subscriptable +NotYet3 = tuple[float, ...] # E: Unexpected "..." \ + # E: "tuple" is not subscriptable +NotYet4 = tuple[float, float, ...] # E: Unexpected "..." 
\ + # E: "tuple" is not subscriptable +[builtins fixtures/tuple.pyi] + +[case testTupleWithDifferentArgsStub] +# https://github.com/python/mypy/issues/11098 +import tup + +[file tup.pyi] +Correct1 = str | tuple[float, float, str] +Correct2 = tuple[float] | str +Correct3 = tuple[float, ...] | str +Correct4 = tuple[float, str] | str +Correct5 = tuple[int, str] +Correct6 = tuple[int, ...] + +RHSAlias1: type = tuple[int, int] +RHSAlias2: type = tuple[int] +RHSAlias3: type = tuple[int, ...] + +# Wrong: + +WrongTypeElement = str | tuple[float, 1] # E: Invalid type: try using Literal[1] instead? +WrongEllipsis = str | tuple[float, float, ...] # E: Unexpected "..." +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index acaaf5f21cf0..3520b5874018 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1816,3 +1816,45 @@ def foo(k: str) -> TD: return x.get(k, {}) [out] _testTypedDictUnionGetFull.py:11: note: Revealed type is "TypedDict('_testTypedDictUnionGetFull.TD', {'x'?: builtins.int, 'y'?: builtins.int})" + +[case testTupleWithDifferentArgsPy310] +# https://github.com/python/mypy/issues/11098 +# flags: --python-version 3.10 +Correct1 = str | tuple[float, float, str] +Correct2 = tuple[float] | str +Correct3 = tuple[float, ...] | str +Correct4 = tuple[float, str] +Correct5 = tuple[float, ...] +Correct6 = list[tuple[int, str]] +c1: Correct1 +c2: Correct2 +c3: Correct3 +c4: Correct4 +c5: Correct5 +c6: Correct6 +reveal_type(c1) +reveal_type(c2) +reveal_type(c3) +reveal_type(c4) +reveal_type(c5) +reveal_type(c6) + +RHSAlias1: type = tuple[int, int] +RHSAlias2: type = tuple[int] +RHSAlias3: type = tuple[int, ...] + +WrongTypeElement = str | tuple[float, 1] # Error +WrongEllipsis = tuple[float, float, ...] | str # Error + +# TODO: This should produce a fixed-length tuple +reveal_type(tuple[int, str]((1, "x"))) +[out] +_testTupleWithDifferentArgsPy310.py:15: note: Revealed type is "Union[builtins.str, Tuple[builtins.float, builtins.float, builtins.str]]" +_testTupleWithDifferentArgsPy310.py:16: note: Revealed type is "Union[Tuple[builtins.float], builtins.str]" +_testTupleWithDifferentArgsPy310.py:17: note: Revealed type is "Union[builtins.tuple[builtins.float, ...], builtins.str]" +_testTupleWithDifferentArgsPy310.py:18: note: Revealed type is "Tuple[builtins.float, builtins.str]" +_testTupleWithDifferentArgsPy310.py:19: note: Revealed type is "builtins.tuple[builtins.float, ...]" +_testTupleWithDifferentArgsPy310.py:20: note: Revealed type is "builtins.list[Tuple[builtins.int, builtins.str]]" +_testTupleWithDifferentArgsPy310.py:26: error: Invalid type: try using Literal[1] instead? +_testTupleWithDifferentArgsPy310.py:27: error: Unexpected "..." +_testTupleWithDifferentArgsPy310.py:30: note: Revealed type is "builtins.tuple[builtins.object, ...]" From 278a09537ac51a131b1298e4d1ba015bac3fc8c5 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 25 Nov 2022 17:27:37 +0000 Subject: [PATCH 079/292] Fix crash on overriding with frozen attrs (#14186) Fixes #6715 Fix is straightforward, currently we assume that if we have a variable in MRO, and its name appears in current class, it is from this class, which in fact may not be the case when a variable is overridden with a property or method. I also add a test case for a crash that was previously reported in the same issue but is already (accidentally?) fixed. 
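Concretely, the pattern that used to hit the `assert isinstance(node, Var)` in the plugin looks like this; it is a condensed version of the new test case, and the property body is only illustrative:

```python
from attrs import frozen

@frozen(kw_only=True)
class Base:
    name: str

@frozen(kw_only=True)
class Sub(Base):
    first_name: str
    last_name: str

    # The inherited "name" attribute is overridden with a property, so the
    # symbol in Sub's symbol table is not a Var, which tripped the old assert.
    @property
    def name(self) -> str:
        return f"{self.first_name} {self.last_name}"  # illustrative body
```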
--- mypy/plugins/attrs.py | 6 ++++- test-data/unit/check-attr.test | 46 ++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 17f1794d8c75..ce0f45967152 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -736,7 +736,11 @@ def _make_frozen(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) if attribute.name in ctx.cls.info.names: # This variable belongs to this class so we can modify it. node = ctx.cls.info.names[attribute.name].node - assert isinstance(node, Var) + if not isinstance(node, Var): + # The superclass attribute was overridden with a non-variable. + # No need to do anything here, override will be verified during + # type checking. + continue node.is_property = True else: # This variable belongs to a super class so create new Var so we diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index fe123acfa001..4d27d5f39d1e 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -1788,3 +1788,49 @@ class C: c = C(x=[C.D()]) reveal_type(c.x) # N: Revealed type is "builtins.list[__main__.C.D]" [builtins fixtures/list.pyi] + +[case testRedefinitionInFrozenClassNoCrash] +import attr + +@attr.s +class MyData: + is_foo: bool = attr.ib() + + @staticmethod # E: Name "is_foo" already defined on line 5 + def is_foo(string: str) -> bool: ... +[builtins fixtures/classmethod.pyi] + +[case testOverrideWithPropertyInFrozenClassNoCrash] +from attrs import frozen + +@frozen(kw_only=True) +class Base: + name: str + +@frozen(kw_only=True) +class Sub(Base): + first_name: str + last_name: str + + @property + def name(self) -> str: ... +[builtins fixtures/property.pyi] + +[case testOverrideWithPropertyInFrozenClassChecked] +from attrs import frozen + +@frozen(kw_only=True) +class Base: + name: str + +@frozen(kw_only=True) +class Sub(Base): + first_name: str + last_name: str + + @property + def name(self) -> int: ... # E: Signature of "name" incompatible with supertype "Base" + +# This matches runtime semantics +reveal_type(Sub) # N: Revealed type is "def (*, name: builtins.str, first_name: builtins.str, last_name: builtins.str) -> __main__.Sub" +[builtins fixtures/property.pyi] From db0beb1014d8367221d47d3b11e2b49e757fa2fa Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Fri, 25 Nov 2022 14:03:39 -0600 Subject: [PATCH 080/292] Switch error code used to report vars defined in different branch (#14176) We previously used `use-before-def` code here but this commit switched it to use `partially-defined`. This particular check generates a lot of false positives, in particular around loops of the form: ```python for i in range(2) if i == 0: x = 1 else: y = x ``` While in an ideal world mypy has no false positives, it's not feasible for us to handle this correctly in the short-term. Moving this to partially-defined error code makes the `use-before-def` have a much lower false positive rate, which is a plus. Unfortunately, `partially-defined` will always have a higher false positive rate. This means that if we enable it by default, lots of people will disable this check. We want to avoid the same thing happening to use-before-def check. See [this PR](https://github.com/python/mypy/pull/14166#issuecomment-1325709734) for further discussion. 
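To make the split concrete, this is roughly how the two situations are reported after this change (it mirrors the new test cases in the diff below):

```python
def branches() -> None:
    if int():
        x = 0
    else:
        y = x  # still use-before-def: Name "x" is used before definition

def loop() -> None:
    first_iter = True
    for i in [0, 1]:
        if first_iter:
            first_iter = False
            x = 0
        else:
            # now partially-defined: Name "x" may be undefined
            # (assigned only on a previous iteration, the loop pattern described above)
            y = x
```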
--- mypy/partially_defined.py | 10 +++- test-data/unit/check-partially-defined.test | 60 +++++++++++++++------ 2 files changed, 53 insertions(+), 17 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 70a454beae9c..7e71af4044ed 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -234,6 +234,7 @@ class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): def __init__(self, msg: MessageBuilder, type_map: dict[Expression, Type]) -> None: self.msg = msg self.type_map = type_map + self.loop_depth = 0 self.tracker = DefinedVariableTracker() def process_lvalue(self, lvalue: Lvalue | None) -> None: @@ -319,10 +320,12 @@ def visit_for_stmt(self, o: ForStmt) -> None: self.process_lvalue(o.index) o.index.accept(self) self.tracker.start_branch_statement() + self.loop_depth += 1 o.body.accept(self) self.tracker.next_branch() if o.else_body: o.else_body.accept(self) + self.loop_depth -= 1 self.tracker.end_branch_statement() def visit_return_stmt(self, o: ReturnStmt) -> None: @@ -354,7 +357,9 @@ def visit_expression_stmt(self, o: ExpressionStmt) -> None: def visit_while_stmt(self, o: WhileStmt) -> None: o.expr.accept(self) self.tracker.start_branch_statement() + self.loop_depth += 1 o.body.accept(self) + self.loop_depth -= 1 if not checker.is_true_literal(o.expr): self.tracker.next_branch() if o.else_body: @@ -380,7 +385,10 @@ def visit_name_expr(self, o: NameExpr) -> None: self.tracker.record_definition(o.name) elif self.tracker.is_defined_in_different_branch(o.name): # A variable is defined in one branch but used in a different branch. - self.msg.var_used_before_def(o.name, o) + if self.loop_depth > 0: + self.msg.variable_may_be_undefined(o.name, o) + else: + self.msg.var_used_before_def(o.name, o) elif self.tracker.is_undefined(o.name): # A variable is undefined. It could be due to two things: # 1. A variable is just totally undefined diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index c63023aa2746..2028362cedbe 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -206,6 +206,48 @@ def f5() -> int: return 3 return 1 +[case testDefinedDifferentBranchUseBeforeDef] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def + +def f0() -> None: + if int(): + x = 0 + else: + y = x # E: Name "x" is used before definition + z = x # E: Name "x" is used before definition + +def f1() -> None: + x = 1 + if int(): + x = 0 + else: + y = x # No error. + + +[case testDefinedDifferentBranchPartiallyDefined] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def + +def f0() -> None: + first_iter = True + for i in [0, 1]: + if first_iter: + first_iter = False + x = 0 + else: + # This is technically a false positive but mypy isn't smart enough for this yet. + y = x # E: Name "x" may be undefined + z = x # E: Name "x" may be undefined + + +def f1() -> None: + while True: + if int(): + x = 0 + else: + y = x # E: Name "x" may be undefined + z = x # E: Name "x" may be undefined + + [case testAssert] # flags: --enable-error-code partially-defined def f1() -> int: @@ -394,21 +436,7 @@ def f0() -> None: x = y # E: Name "y" is used before definition y: int = 1 -def f1() -> None: - if int(): - x = 0 - else: - y = x # E: Name "x" is used before definition - z = x # E: Name "x" is used before definition - def f2() -> None: - x = 1 - if int(): - x = 0 - else: - y = x # No error. 
- -def f3() -> None: if int(): pass else: @@ -418,14 +446,14 @@ def f3() -> None: def inner2() -> None: z = 0 -def f4() -> None: +def f3() -> None: if int(): pass else: y = z # E: Name "z" is used before definition z: int = 2 -def f5() -> None: +def f4() -> None: if int(): pass else: From d58a851463fca5f2512c66e3db7ecee0d26d1eae Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Fri, 25 Nov 2022 15:02:56 -0600 Subject: [PATCH 081/292] Implement miscellaneous fixes for partially-defined check (#14175) These are the issues that I've found using mypy-primer. You should be able to review this PR commit-by-commit. Each commit includes the relevant tests: - Process imports correctly - Support for function names - Skip stub files (this change has no tests) - Handle builtins and implicit module attrs (e.g. `str` and `__doc__`) - Improved support for lambdas. --- mypy/build.py | 3 + mypy/partially_defined.py | 43 +++++++ test-data/unit/check-partially-defined.test | 121 ++++++++++++++++++++ 3 files changed, 167 insertions(+) diff --git a/mypy/build.py b/mypy/build.py index b32276dd3020..8df5e480791d 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2349,6 +2349,9 @@ def type_check_second_pass(self) -> bool: def detect_partially_defined_vars(self, type_map: dict[Expression, Type]) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" + if self.tree.is_stub: + # We skip stub files because they aren't actually executed. + return manager = self.manager if manager.errors.is_error_code_enabled( codes.PARTIALLY_DEFINED diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 7e71af4044ed..4ca083f5e4c5 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -16,15 +16,21 @@ FuncItem, GeneratorExpr, IfStmt, + Import, + ImportFrom, + LambdaExpr, ListExpr, Lvalue, MatchStmt, NameExpr, RaiseStmt, + RefExpr, ReturnStmt, + StarExpr, TupleExpr, WhileStmt, WithStmt, + implicit_module_attrs, ) from mypy.patterns import AsPattern, StarredPattern from mypy.reachability import ALWAYS_TRUE, infer_pattern_value @@ -213,6 +219,10 @@ def is_undefined(self, name: str) -> bool: return self._scope().branch_stmts[-1].is_undefined(name) +def refers_to_builtin(o: RefExpr) -> bool: + return o.fullname is not None and o.fullname.startswith("builtins.") + + class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): """Detects the following cases: - A variable that's defined only part of the time. 
@@ -236,6 +246,8 @@ def __init__(self, msg: MessageBuilder, type_map: dict[Expression, Type]) -> Non self.type_map = type_map self.loop_depth = 0 self.tracker = DefinedVariableTracker() + for name in implicit_module_attrs: + self.tracker.record_definition(name) def process_lvalue(self, lvalue: Lvalue | None) -> None: if isinstance(lvalue, NameExpr): @@ -244,6 +256,8 @@ def process_lvalue(self, lvalue: Lvalue | None) -> None: for ref in refs: self.msg.var_used_before_def(lvalue.name, ref) self.tracker.record_definition(lvalue.name) + elif isinstance(lvalue, StarExpr): + self.process_lvalue(lvalue.expr) elif isinstance(lvalue, (ListExpr, TupleExpr)): for item in lvalue.items: self.process_lvalue(item) @@ -291,6 +305,7 @@ def visit_match_stmt(self, o: MatchStmt) -> None: self.tracker.end_branch_statement() def visit_func_def(self, o: FuncDef) -> None: + self.tracker.record_definition(o.name) self.tracker.enter_scope() super().visit_func_def(o) self.tracker.exit_scope() @@ -332,6 +347,11 @@ def visit_return_stmt(self, o: ReturnStmt) -> None: super().visit_return_stmt(o) self.tracker.skip_branch() + def visit_lambda_expr(self, o: LambdaExpr) -> None: + self.tracker.enter_scope() + super().visit_lambda_expr(o) + self.tracker.exit_scope() + def visit_assert_stmt(self, o: AssertStmt) -> None: super().visit_assert_stmt(o) if checker.is_false_literal(o.expr): @@ -377,6 +397,8 @@ def visit_starred_pattern(self, o: StarredPattern) -> None: super().visit_starred_pattern(o) def visit_name_expr(self, o: NameExpr) -> None: + if refers_to_builtin(o): + return if self.tracker.is_partially_defined(o.name): # A variable is only defined in some branches. if self.msg.errors.is_error_code_enabled(errorcodes.PARTIALLY_DEFINED): @@ -404,3 +426,24 @@ def visit_with_stmt(self, o: WithStmt) -> None: expr.accept(self) self.process_lvalue(idx) o.body.accept(self) + + def visit_import(self, o: Import) -> None: + for mod, alias in o.ids: + if alias is not None: + self.tracker.record_definition(alias) + else: + # When you do `import x.y`, only `x` becomes defined. + names = mod.split(".") + if len(names) > 0: + # `names` should always be nonempty, but we don't want mypy + # to crash on invalid code. + self.tracker.record_definition(names[0]) + super().visit_import(o) + + def visit_import_from(self, o: ImportFrom) -> None: + for mod, alias in o.names: + name = alias + if name is None: + name = mod + self.tracker.record_definition(name) + super().visit_import_from(o) diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index 2028362cedbe..85bf08079f79 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -90,6 +90,16 @@ else: a = y + x # E: Name "x" may be undefined a = y + z # E: Name "z" may be undefined +[case testIndexExpr] +# flags: --enable-error-code partially-defined + +if int(): + *x, y = (1, 2) +else: + x = [1, 2] +a = x # No error. +b = y # E: Name "y" may be undefined + [case testRedefined] # flags: --enable-error-code partially-defined y = 3 @@ -104,6 +114,32 @@ else: x = y + 2 +[case testFunction] +# flags: --enable-error-code partially-defined +def f0() -> None: + if int(): + def some_func() -> None: + pass + + some_func() # E: Name "some_func" may be undefined + +def f1() -> None: + if int(): + def some_func() -> None: + pass + else: + def some_func() -> None: + pass + + some_func() # No error. 
+ +[case testLambda] +# flags: --enable-error-code partially-defined +def f0(b: bool) -> None: + if b: + fn = lambda: 2 + y = fn # E: Name "fn" may be undefined + [case testGenerator] # flags: --enable-error-code partially-defined if int(): @@ -460,3 +496,88 @@ def f4() -> None: y = z # E: Name "z" is used before definition x = z # E: Name "z" is used before definition z: int = 2 + +[case testUseBeforeDefImportsBasic] +# flags: --enable-error-code use-before-def +import foo # type: ignore +import x.y # type: ignore + +def f0() -> None: + a = foo # No error. + foo: int = 1 + +def f1() -> None: + a = y # E: Name "y" is used before definition + y: int = 1 + +def f2() -> None: + a = x # No error. + x: int = 1 + +def f3() -> None: + a = x.y # No error. + x: int = 1 + +[case testUseBeforeDefImportBasicRename] +# flags: --enable-error-code use-before-def +import x.y as z # type: ignore +from typing import Any + +def f0() -> None: + a = z # No error. + z: int = 1 + +def f1() -> None: + a = x # E: Name "x" is used before definition + x: int = 1 + +def f2() -> None: + a = x.y # E: Name "x" is used before definition + x: Any = 1 + +def f3() -> None: + a = y # E: Name "y" is used before definition + y: int = 1 + +[case testUseBeforeDefImportFrom] +# flags: --enable-error-code use-before-def +from foo import x # type: ignore + +def f0() -> None: + a = x # No error. + x: int = 1 + +[case testUseBeforeDefImportFromRename] +# flags: --enable-error-code use-before-def +from foo import x as y # type: ignore + +def f0() -> None: + a = y # No error. + y: int = 1 + +def f1() -> None: + a = x # E: Name "x" is used before definition + x: int = 1 + +[case testUseBeforeDefFunctionDeclarations] +# flags: --enable-error-code use-before-def + +def f0() -> None: + def inner() -> None: + pass + + inner() # No error. + inner = lambda: None + +[case testUseBeforeDefBuiltins] +# flags: --enable-error-code use-before-def + +def f0() -> None: + s = type(123) + type = "abc" + a = type + +[case testUseBeforeDefImplicitModuleAttrs] +# flags: --enable-error-code use-before-def +a = __name__ # No error. +__name__ = "abc" From a9c62c5f82f34a923b8117a5394983aefce37b63 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Fri, 25 Nov 2022 14:57:08 -0800 Subject: [PATCH 082/292] stubtest: if a default is present in the stub, check that it is correct (#14085) Helps with python/typeshed#8988. --- mypy/evalexpr.py | 204 ++++++++++++++++++++++++++++++++++++++ mypy/stubtest.py | 19 ++++ mypy/test/teststubtest.py | 55 +++++++++- 3 files changed, 277 insertions(+), 1 deletion(-) create mode 100644 mypy/evalexpr.py diff --git a/mypy/evalexpr.py b/mypy/evalexpr.py new file mode 100644 index 000000000000..2bc6966fa2fa --- /dev/null +++ b/mypy/evalexpr.py @@ -0,0 +1,204 @@ +""" + +Evaluate an expression. + +Used by stubtest; in a separate file because things break if we don't +put it in a mypyc-compiled file. + +""" +import ast +from typing_extensions import Final + +import mypy.nodes +from mypy.visitor import ExpressionVisitor + +UNKNOWN = object() + + +class _NodeEvaluator(ExpressionVisitor[object]): + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> int: + return o.value + + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> str: + return o.value + + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> object: + # The value of a BytesExpr is a string created from the repr() + # of the bytes object. Get the original bytes back. 
+ try: + return ast.literal_eval(f"b'{o.value}'") + except SyntaxError: + return ast.literal_eval(f'b"{o.value}"') + + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> float: + return o.value + + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> object: + return o.value + + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> object: + return Ellipsis + + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> object: + return UNKNOWN + + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> object: + if o.name == "True": + return True + elif o.name == "False": + return False + elif o.name == "None": + return None + # TODO: Handle more names by figuring out a way to hook into the + # symbol table. + return UNKNOWN + + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> object: + return UNKNOWN + + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> object: + return UNKNOWN + + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> object: + return UNKNOWN + + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> object: + return UNKNOWN + + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> object: + return UNKNOWN + + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> object: + return UNKNOWN + + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> object: + return o.expr.accept(self) + + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> object: + return o.expr.accept(self) + + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> object: + return UNKNOWN + + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> object: + return UNKNOWN + + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> object: + operand = o.expr.accept(self) + if operand is UNKNOWN: + return UNKNOWN + if o.op == "-": + if isinstance(operand, (int, float, complex)): + return -operand + elif o.op == "+": + if isinstance(operand, (int, float, complex)): + return +operand + elif o.op == "~": + if isinstance(operand, int): + return ~operand + elif o.op == "not": + if isinstance(operand, (bool, int, float, str, bytes)): + return not operand + return UNKNOWN + + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> object: + return o.value.accept(self) + + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return items + return UNKNOWN + + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> object: + items = [ + (UNKNOWN if key is None else key.accept(self), value.accept(self)) + for key, value in o.items + ] + if all(key is not UNKNOWN and value is not None for key, value in items): + return dict(items) + return UNKNOWN + + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return tuple(items) + return UNKNOWN + + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return set(items) + return UNKNOWN + + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> object: + return UNKNOWN + + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> object: + return UNKNOWN + + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> object: + return UNKNOWN + + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> object: + return UNKNOWN + + def visit_set_comprehension(self, o: 
mypy.nodes.SetComprehension) -> object: + return UNKNOWN + + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> object: + return UNKNOWN + + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> object: + return UNKNOWN + + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> object: + return UNKNOWN + + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> object: + return UNKNOWN + + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> object: + return UNKNOWN + + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> object: + return UNKNOWN + + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> object: + return UNKNOWN + + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> object: + return UNKNOWN + + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> object: + return UNKNOWN + + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> object: + return UNKNOWN + + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> object: + return UNKNOWN + + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> object: + return UNKNOWN + + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> object: + return UNKNOWN + + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> object: + return UNKNOWN + + def visit_temp_node(self, o: mypy.nodes.TempNode) -> object: + return UNKNOWN + + +_evaluator: Final = _NodeEvaluator() + + +def evaluate_expression(expr: mypy.nodes.Expression) -> object: + """Evaluate an expression at runtime. + + Return the result of the expression, or UNKNOWN if the expression cannot be + evaluated. + """ + return expr.accept(_evaluator) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 74e57d9e5617..8add8178823a 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -29,11 +29,13 @@ import mypy.build import mypy.modulefinder +import mypy.nodes import mypy.state import mypy.types import mypy.version from mypy import nodes from mypy.config_parser import parse_config_file +from mypy.evalexpr import UNKNOWN, evaluate_expression from mypy.options import Options from mypy.util import FancyFormatter, bytes_to_human_readable_repr, is_dunder, plural_s @@ -573,6 +575,23 @@ def _verify_arg_default_value( f"has a default value of type {runtime_type}, " f"which is incompatible with stub argument type {stub_type}" ) + if stub_arg.initializer is not None: + stub_default = evaluate_expression(stub_arg.initializer) + if ( + stub_default is not UNKNOWN + and stub_default is not ... + and ( + stub_default != runtime_arg.default + # We want the types to match exactly, e.g. in case the stub has + # True and the runtime has 1 (or vice versa). + or type(stub_default) is not type(runtime_arg.default) # noqa: E721 + ) + ): + yield ( + f'runtime argument "{runtime_arg.name}" ' + f"has a default value of {runtime_arg.default!r}, " + f"which is different from stub argument default {stub_default!r}" + ) else: if stub_arg.kind.is_optional(): yield ( diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 5a6904bfaaf4..e863f4f57568 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -302,7 +302,7 @@ def test_arg_kind(self) -> Iterator[Case]: ) @collect_cases - def test_default_value(self) -> Iterator[Case]: + def test_default_presence(self) -> Iterator[Case]: yield Case( stub="def f1(text: str = ...) -> None: ...", runtime="def f1(text = 'asdf'): pass", @@ -336,6 +336,59 @@ def f6(text: _T = ...) -> None: ... 
error="f6", ) + @collect_cases + def test_default_value(self) -> Iterator[Case]: + yield Case( + stub="def f1(text: str = 'x') -> None: ...", + runtime="def f1(text = 'y'): pass", + error="f1", + ) + yield Case( + stub='def f2(text: bytes = b"x\'") -> None: ...', + runtime='def f2(text = b"x\'"): pass', + error=None, + ) + yield Case( + stub='def f3(text: bytes = b"y\'") -> None: ...', + runtime='def f3(text = b"x\'"): pass', + error="f3", + ) + yield Case( + stub="def f4(text: object = 1) -> None: ...", + runtime="def f4(text = 1.0): pass", + error="f4", + ) + yield Case( + stub="def f5(text: object = True) -> None: ...", + runtime="def f5(text = 1): pass", + error="f5", + ) + yield Case( + stub="def f6(text: object = True) -> None: ...", + runtime="def f6(text = True): pass", + error=None, + ) + yield Case( + stub="def f7(text: object = not True) -> None: ...", + runtime="def f7(text = False): pass", + error=None, + ) + yield Case( + stub="def f8(text: object = not True) -> None: ...", + runtime="def f8(text = True): pass", + error="f8", + ) + yield Case( + stub="def f9(text: object = {1: 2}) -> None: ...", + runtime="def f9(text = {1: 3}): pass", + error="f9", + ) + yield Case( + stub="def f10(text: object = [1, 2]) -> None: ...", + runtime="def f10(text = [1, 2]): pass", + error=None, + ) + @collect_cases def test_static_class_method(self) -> Iterator[Case]: yield Case( From 5795488903f807211d267d3319d153c30e74b77e Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Sun, 27 Nov 2022 09:40:05 -0600 Subject: [PATCH 083/292] [partially defined] fix gating by error code (#14194) This was incorrect for a few callsites. I refactored the code to make it harder to make a mistake like this. This wasn't visible before because we only run the PartillyDefinedVisitor if either of the error codes reported by it are enabled. --- mypy/partially_defined.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 4ca083f5e4c5..9a8c397c0c28 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -7,6 +7,7 @@ AssignmentExpr, AssignmentStmt, BreakStmt, + Context, ContinueStmt, DictionaryComprehension, Expression, @@ -249,12 +250,20 @@ def __init__(self, msg: MessageBuilder, type_map: dict[Expression, Type]) -> Non for name in implicit_module_attrs: self.tracker.record_definition(name) + def var_used_before_def(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.USE_BEFORE_DEF): + self.msg.var_used_before_def(name, context) + + def variable_may_be_undefined(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.PARTIALLY_DEFINED): + self.msg.variable_may_be_undefined(name, context) + def process_lvalue(self, lvalue: Lvalue | None) -> None: if isinstance(lvalue, NameExpr): # Was this name previously used? If yes, it's a use-before-definition error. refs = self.tracker.pop_undefined_ref(lvalue.name) for ref in refs: - self.msg.var_used_before_def(lvalue.name, ref) + self.var_used_before_def(lvalue.name, ref) self.tracker.record_definition(lvalue.name) elif isinstance(lvalue, StarExpr): self.process_lvalue(lvalue.expr) @@ -401,16 +410,15 @@ def visit_name_expr(self, o: NameExpr) -> None: return if self.tracker.is_partially_defined(o.name): # A variable is only defined in some branches. 
- if self.msg.errors.is_error_code_enabled(errorcodes.PARTIALLY_DEFINED): - self.msg.variable_may_be_undefined(o.name, o) + self.variable_may_be_undefined(o.name, o) # We don't want to report the error on the same variable multiple times. self.tracker.record_definition(o.name) elif self.tracker.is_defined_in_different_branch(o.name): # A variable is defined in one branch but used in a different branch. if self.loop_depth > 0: - self.msg.variable_may_be_undefined(o.name, o) + self.variable_may_be_undefined(o.name, o) else: - self.msg.var_used_before_def(o.name, o) + self.var_used_before_def(o.name, o) elif self.tracker.is_undefined(o.name): # A variable is undefined. It could be due to two things: # 1. A variable is just totally undefined From a82c288890e80ec85cc8d84985a65d6c4b7f9ffe Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Sun, 27 Nov 2022 12:06:01 -0600 Subject: [PATCH 084/292] [partially defined] rework handling of else in loops (#14191) `else` in loops is executed if the loop didn't exit via a raise, return, or a break. Therefore, we should treat it execution as unconditional if there is not a break statement. This PR also improves error messages in `else`, reporting them as "may be undefined" instead of "used before definition" Fixes #14097 Co-authored-by: Ivan Levkivskyi --- mypy/partially_defined.py | 49 +++++++++++++---- test-data/unit/check-partially-defined.test | 59 ++++++++++++++++----- 2 files changed, 86 insertions(+), 22 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 9a8c397c0c28..0005282d92a9 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -224,6 +224,11 @@ def refers_to_builtin(o: RefExpr) -> bool: return o.fullname is not None and o.fullname.startswith("builtins.") +class Loop: + def __init__(self) -> None: + self.has_break = False + + class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): """Detects the following cases: - A variable that's defined only part of the time. @@ -245,7 +250,7 @@ class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): def __init__(self, msg: MessageBuilder, type_map: dict[Expression, Type]) -> None: self.msg = msg self.type_map = type_map - self.loop_depth = 0 + self.loops: list[Loop] = [] self.tracker = DefinedVariableTracker() for name in implicit_module_attrs: self.tracker.record_definition(name) @@ -344,13 +349,23 @@ def visit_for_stmt(self, o: ForStmt) -> None: self.process_lvalue(o.index) o.index.accept(self) self.tracker.start_branch_statement() - self.loop_depth += 1 + loop = Loop() + self.loops.append(loop) o.body.accept(self) self.tracker.next_branch() - if o.else_body: - o.else_body.accept(self) - self.loop_depth -= 1 self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. 
+ has_break = loop.has_break + if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() + o.else_body.accept(self) + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() def visit_return_stmt(self, o: ReturnStmt) -> None: super().visit_return_stmt(o) @@ -376,6 +391,8 @@ def visit_continue_stmt(self, o: ContinueStmt) -> None: def visit_break_stmt(self, o: BreakStmt) -> None: super().visit_break_stmt(o) + if self.loops: + self.loops[-1].has_break = True self.tracker.skip_branch() def visit_expression_stmt(self, o: ExpressionStmt) -> None: @@ -386,14 +403,28 @@ def visit_expression_stmt(self, o: ExpressionStmt) -> None: def visit_while_stmt(self, o: WhileStmt) -> None: o.expr.accept(self) self.tracker.start_branch_statement() - self.loop_depth += 1 + loop = Loop() + self.loops.append(loop) o.body.accept(self) - self.loop_depth -= 1 + has_break = loop.has_break if not checker.is_true_literal(o.expr): + # If this is a loop like `while True`, we can consider the body to be + # a single branch statement (we're guaranteed that the body is executed at least once). + # If not, call next_branch() to make all variables defined there conditional. self.tracker.next_branch() + self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. + if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() if o.else_body: o.else_body.accept(self) - self.tracker.end_branch_statement() + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() def visit_as_pattern(self, o: AsPattern) -> None: if o.name is not None: @@ -415,7 +446,7 @@ def visit_name_expr(self, o: NameExpr) -> None: self.tracker.record_definition(o.name) elif self.tracker.is_defined_in_different_branch(o.name): # A variable is defined in one branch but used in a different branch. - if self.loop_depth > 0: + if self.loops: self.variable_may_be_undefined(o.name, o) else: self.var_used_before_def(o.name, o) diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index 85bf08079f79..e91e7aa65e7b 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -167,22 +167,49 @@ def foo(a: int) -> None: [case testWhile] # flags: --enable-error-code partially-defined while int(): - x = 1 + a = 1 -y = x # E: Name "x" may be undefined +x = a # E: Name "a" may be undefined while int(): - z = 1 + b = 1 else: - z = 2 + b = 2 -y = z # No error. +y = b # No error. while True: - k = 1 + c = 1 if int(): break -y = k # No error. +y = c # No error. + +# This while loop doesn't have a `break` inside, so we know that the else must always get executed. +while int(): + pass +else: + d = 1 +y = d # No error. + +while int(): + if int(): + break +else: + e = 1 +# If a while loop has a `break`, it's possible that the else didn't get executed. 
+y = e # E: Name "e" may be undefined + +while int(): + while int(): + if int(): + break + else: + f = 1 +else: + g = 2 + +y = f # E: Name "f" may be undefined +y = g [case testForLoop] # flags: --enable-error-code partially-defined @@ -190,7 +217,6 @@ for x in [1, 2, 3]: if x: x = 1 y = x - z = 1 else: z = 2 @@ -283,6 +309,17 @@ def f1() -> None: y = x # E: Name "x" may be undefined z = x # E: Name "x" may be undefined +def f2() -> None: + for i in [0, 1]: + x = i + else: + y = x # E: Name "x" may be undefined + +def f3() -> None: + while int(): + x = 1 + else: + y = x # E: Name "x" may be undefined [case testAssert] # flags: --enable-error-code partially-defined @@ -338,16 +375,12 @@ def f2() -> int: while int(): if int(): x = 1 - z = 1 elif int(): pass else: continue y = x # E: Name "x" may be undefined - else: - x = 2 - z = 2 - return z # E: Name "z" may be undefined + return x # E: Name "x" may be undefined def f3() -> None: while True: From 5c3d306fddd900a2eb66437af05dd36851b221a4 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Sun, 27 Nov 2022 16:06:21 -0600 Subject: [PATCH 085/292] [partially defined] use correct error code in nested if statements (#14193) In order to know to use `partially-defined` code vs `use-before-def`, we had to check if a variable is defined in any branches, not just in the current one. This would cause an error to be reported as a `use-before-def` and not `partially-defined` code. --- mypy/partially_defined.py | 14 +++++++----- test-data/unit/check-partially-defined.test | 25 ++++++++++++++++++--- 2 files changed, 31 insertions(+), 8 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 0005282d92a9..3c9dec13af70 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -109,11 +109,9 @@ def is_undefined(self, name: str) -> bool: branch = self.branches[-1] return name not in branch.may_be_defined and name not in branch.must_be_defined - def is_defined_in_different_branch(self, name: str) -> bool: + def is_defined_in_a_branch(self, name: str) -> bool: assert len(self.branches) > 0 - if not self.is_undefined(name): - return False - for b in self.branches[: len(self.branches) - 1]: + for b in self.branches: if name in b.must_be_defined or name in b.may_be_defined: return True return False @@ -213,7 +211,13 @@ def is_partially_defined(self, name: str) -> bool: def is_defined_in_different_branch(self, name: str) -> bool: """This will return true if a variable is defined in a branch that's not the current branch.""" assert len(self._scope().branch_stmts) > 0 - return self._scope().branch_stmts[-1].is_defined_in_different_branch(name) + stmt = self._scope().branch_stmts[-1] + if not stmt.is_undefined(name): + return False + for stmt in self._scope().branch_stmts: + if stmt.is_defined_in_a_branch(name): + return True + return False def is_undefined(self, name: str) -> bool: assert len(self._scope().branch_stmts) > 0 diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index e91e7aa65e7b..11aa30642314 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -285,6 +285,19 @@ def f1() -> None: else: y = x # No error. +def f2() -> None: + if int(): + x = 0 + elif int(): + y = x # E: Name "x" is used before definition + else: + y = x # E: Name "x" is used before definition + if int(): + z = x # E: Name "x" is used before definition + x = 1 + else: + x = 2 + w = x # No error. 
[case testDefinedDifferentBranchPartiallyDefined] # flags: --enable-error-code partially-defined --enable-error-code use-before-def @@ -295,11 +308,17 @@ def f0() -> None: if first_iter: first_iter = False x = 0 - else: + elif int(): # This is technically a false positive but mypy isn't smart enough for this yet. y = x # E: Name "x" may be undefined - z = x # E: Name "x" may be undefined - + else: + y = x # E: Name "x" may be undefined + if int(): + z = x # E: Name "x" may be undefined + x = 1 + else: + x = 2 + w = x # No error. def f1() -> None: while True: From 71288c749f1507feab0b2ffcc312f1fb7c33cc46 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Sun, 27 Nov 2022 16:07:29 -0600 Subject: [PATCH 086/292] [partially defined] respect check-untyped-defs flag (#14204) We should not check untyped functions unless --check-untyped-defs is set. This is part of the reason why #14166 saw so many new errors reported in open source projects. --- mypy/build.py | 2 +- mypy/partially_defined.py | 8 +++++++- test-data/unit/check-partially-defined.test | 18 ++++++++++++++++++ 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 8df5e480791d..b85b49483739 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2359,7 +2359,7 @@ def detect_partially_defined_vars(self, type_map: dict[Expression, Type]) -> Non manager.errors.set_file(self.xpath, self.tree.fullname, options=manager.options) self.tree.accept( PartiallyDefinedVariableVisitor( - MessageBuilder(manager.errors, manager.modules), type_map + MessageBuilder(manager.errors, manager.modules), type_map, manager.options ) ) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 3c9dec13af70..c8db4bc5960c 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -33,6 +33,7 @@ WithStmt, implicit_module_attrs, ) +from mypy.options import Options from mypy.patterns import AsPattern, StarredPattern from mypy.reachability import ALWAYS_TRUE, infer_pattern_value from mypy.traverser import ExtendedTraverserVisitor @@ -251,9 +252,12 @@ class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): handled by the semantic analyzer. """ - def __init__(self, msg: MessageBuilder, type_map: dict[Expression, Type]) -> None: + def __init__( + self, msg: MessageBuilder, type_map: dict[Expression, Type], options: Options + ) -> None: self.msg = msg self.type_map = type_map + self.options = options self.loops: list[Loop] = [] self.tracker = DefinedVariableTracker() for name in implicit_module_attrs: @@ -329,6 +333,8 @@ def visit_func_def(self, o: FuncDef) -> None: self.tracker.exit_scope() def visit_func(self, o: FuncItem) -> None: + if o.is_dynamic() and not self.options.check_untyped_defs: + return if o.arguments is not None: for arg in o.arguments: self.tracker.record_definition(arg.variable.name) diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index 11aa30642314..52822f98ab53 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -633,3 +633,21 @@ def f0() -> None: # flags: --enable-error-code use-before-def a = __name__ # No error. __name__ = "abc" + +[case testUntypedDef] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def + +def f(): + if int(): + x = 0 + z = y # No use-before-def error because def is untyped. + y = x # No partially-defined error because def is untyped. 
+ +[case testUntypedDefCheckUntypedDefs] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def --check-untyped-defs + +def f(): + if int(): + x = 0 + z = y # E: Name "y" is used before definition + y: int = x # E: Name "x" may be undefined From 00ee7d5137854f9ae09060f834a6e988ee04e342 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 28 Nov 2022 01:05:22 +0000 Subject: [PATCH 087/292] Fix a crash when incorrect super() is used outside a method (#14208) Ref #14201 --- mypy/checkexpr.py | 7 ++++--- test-data/unit/check-super.test | 7 +++++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index eb585aba42df..b97c78cba2fc 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4390,13 +4390,14 @@ def visit_super_expr(self, e: SuperExpr) -> Type: index = mro.index(type_info) else: method = self.chk.scope.top_function() - assert method is not None # Mypy explicitly allows supertype upper bounds (and no upper bound at all) # for annotating self-types. However, if such an annotation is used for # checking super() we will still get an error. So to be consistent, we also # allow such imprecise annotations for use with super(), where we fall back - # to the current class MRO instead. - if is_self_type_like(instance_type, is_classmethod=method.is_class): + # to the current class MRO instead. This works only from inside a method. + if method is not None and is_self_type_like( + instance_type, is_classmethod=method.is_class + ): if e.info and type_info in e.info.mro: mro = e.info.mro index = mro.index(type_info) diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test index 0913f4f25126..6537f563a99c 100644 --- a/test-data/unit/check-super.test +++ b/test-data/unit/check-super.test @@ -409,3 +409,10 @@ class B(A): reveal_type(super().foo()) # N: Revealed type is "T`-1" return super().foo() [builtins fixtures/classmethod.pyi] + +[case testWrongSuperOutsideMethodNoCrash] +class B: + x: int +class C1(B): ... +class C2(B): ... +super(C1, C2).x # E: Argument 2 for "super" not an instance of argument 1 From 365297c5240af67acb4798a18135dc87b734477b Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 28 Nov 2022 04:21:34 +0000 Subject: [PATCH 088/292] Fix crash in Self type on forward reference in upper bound (#14206) Fixes #14199 This is straightforward, just use the same logic as for regular (user defined) type variables. --- mypy/semanal.py | 6 +++++- test-data/unit/check-classes.test | 15 +++++++++++++++ test-data/unit/check-dataclasses.test | 20 ++++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 698959ca1bdf..3e1e1a1e5d61 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1047,7 +1047,11 @@ def setup_self_type(self) -> None: assert self.type is not None info = self.type if info.self_type is not None: - return + if has_placeholder(info.self_type.upper_bound): + # Similar to regular (user defined) type variables. + self.defer(force_progress=True) + else: + return info.self_type = TypeVarType("Self", f"{info.fullname}.Self", 0, [], fill_typevars(info)) def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index e3aea122ebe1..8784c73c5b17 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -7679,3 +7679,18 @@ def g(b: Type[Any]) -> None: def h(b: type) -> None: class D(b): ... 
+ +[case testNoCrashOnSelfWithForwardRefGenericClass] +from typing import Generic, Sequence, TypeVar, Self + +_T = TypeVar('_T', bound="Foo") + +class Foo: + foo: int + +class Element(Generic[_T]): + elements: Sequence[Self] + +class Bar(Foo): ... +e: Element[Bar] +reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index d289ec5a8e58..c248f8db8585 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1981,3 +1981,23 @@ def const_two(x: T) -> str: c = Cont(Box(const_two)) reveal_type(c) # N: Revealed type is "__main__.Cont[builtins.str]" [builtins fixtures/dataclasses.pyi] + +[case testNoCrashOnSelfWithForwardRefGenericDataclass] +from typing import Generic, Sequence, TypeVar, Self +from dataclasses import dataclass + +_T = TypeVar('_T', bound="Foo") + +@dataclass +class Foo: + foo: int + +@dataclass +class Element(Generic[_T]): + elements: Sequence[Self] + +@dataclass +class Bar(Foo): ... +e: Element[Bar] +reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" +[builtins fixtures/dataclasses.pyi] From 19c7fd3815ca68074da317f8f63627194831dcfe Mon Sep 17 00:00:00 2001 From: Vincent Vanlaer <13833860+VincentVanlaer@users.noreply.github.com> Date: Mon, 28 Nov 2022 10:02:54 +0100 Subject: [PATCH 089/292] Fix ParamSpec constraint for types as callable (#14153) Most types can be considered as callables, constructing the type itself. When a constraint was created for a ParamSpec variable, the return type would be set to NoneType, which conflicts with assumptions that CallableType makes when it is the constructor of another type, crashing mypy. This patch replaces the return type by UninhabitedType instead, which stops CallableType from considering itself as a constructor. 
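Roughly the code pattern that triggered the crash, condensed from the regression test added in this patch (class and function names are the ones used in that test):

    from typing import Callable, ParamSpec

    P = ParamSpec("P")

    class SomeClass:
        def __init__(self, a: str) -> None: ...

    def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> None: ...

    def constructor(a: str) -> SomeClass:
        return SomeClass(a)

    # Passing the class itself as the Callable[P, SomeClass] argument used to
    # create a ParamSpec constraint with a NoneType return type and crash mypy;
    # it now type-checks normally.
    func(SomeClass, constructor)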
--- mypy/constraints.py | 2 +- .../unit/check-parameter-specification.test | 24 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 4e78e5ff1117..6efb9997d36f 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -954,7 +954,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: arg_types=cactual.arg_types[prefix_len:], arg_kinds=cactual.arg_kinds[prefix_len:], arg_names=cactual.arg_names[prefix_len:], - ret_type=NoneType(), + ret_type=UninhabitedType(), ), ) ) diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 4a5dd0c1b04e..463ba3e65466 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1292,6 +1292,30 @@ class C(Generic[P]): reveal_type(bar(C(fn=foo, x=1))) # N: Revealed type is "__main__.C[[x: builtins.int]]" [builtins fixtures/paramspec.pyi] +[case testParamSpecClassConstructor] +# flags: --strict-optional +from typing import ParamSpec, Callable + +P = ParamSpec("P") + +class SomeClass: + def __init__(self, a: str) -> None: + pass + +def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> None: + pass + +def constructor(a: str) -> SomeClass: + return SomeClass(a) + +def wrong_constructor(a: bool) -> SomeClass: + return SomeClass("a") + +func(SomeClass, constructor) +func(SomeClass, wrong_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" \ + # E: Argument 2 to "func" has incompatible type "Callable[[bool], SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" +[builtins fixtures/paramspec.pyi] + [case testParamSpecInTypeAliasBasic] from typing import ParamSpec, Callable From 8ab0ef1bbcbae730ce94d59b19f024e968cab1dc Mon Sep 17 00:00:00 2001 From: Jonathan Daniel <36337649+jond01@users.noreply.github.com> Date: Mon, 28 Nov 2022 15:47:23 +0200 Subject: [PATCH 090/292] [docs] Update `running_mypy.rst` - fix typo (#14212) Fix typo - remove an unneeded period https://mypy.readthedocs.io/en/stable/running_mypy.html#ignore-missing-imports ![image](https://user-images.githubusercontent.com/36337649/204286512-34a07d28-5417-4857-87a6-f910b33fb81f.png) --- docs/source/running_mypy.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index 3deaf26023fc..4a7b5dcf4093 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -250,7 +250,7 @@ If you are getting this error, try: 1. Upgrading the version of the library you're using, in case a newer version has started to include type hints. -2. Searching to see if there is a :ref:`PEP 561 compliant stub package `. +2. Searching to see if there is a :ref:`PEP 561 compliant stub package ` corresponding to your third party library. Stub packages let you install type hints independently from the library itself. From d094c381277b7bec3c3b9956a53395e989347112 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Mon, 28 Nov 2022 19:00:13 -0600 Subject: [PATCH 091/292] [use before def] handle class and function definitions (#14203) Previously, we would ignore any class definitions and would fail to detect undefined classes and functions. This updates the logic to handle them. 
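With --enable-error-code use-before-def, both of the following are now flagged (condensed from the new test cases):

    foo()    # E: Name "foo" is used before definition
    def foo(): pass

    y = A()  # E: Name "A" is used before definition
    class A: pass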
Closes #686 --- mypy/partially_defined.py | 22 +++++++++---- test-data/unit/check-partially-defined.test | 34 +++++++++++++++++++++ 2 files changed, 50 insertions(+), 6 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index c8db4bc5960c..c2c925e0477c 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -7,6 +7,7 @@ AssignmentExpr, AssignmentStmt, BreakStmt, + ClassDef, Context, ContinueStmt, DictionaryComprehension, @@ -271,13 +272,16 @@ def variable_may_be_undefined(self, name: str, context: Context) -> None: if self.msg.errors.is_error_code_enabled(errorcodes.PARTIALLY_DEFINED): self.msg.variable_may_be_undefined(name, context) + def process_definition(self, name: str) -> None: + # Was this name previously used? If yes, it's a use-before-definition error. + refs = self.tracker.pop_undefined_ref(name) + for ref in refs: + self.var_used_before_def(name, ref) + self.tracker.record_definition(name) + def process_lvalue(self, lvalue: Lvalue | None) -> None: if isinstance(lvalue, NameExpr): - # Was this name previously used? If yes, it's a use-before-definition error. - refs = self.tracker.pop_undefined_ref(lvalue.name) - for ref in refs: - self.var_used_before_def(lvalue.name, ref) - self.tracker.record_definition(lvalue.name) + self.process_definition(lvalue.name) elif isinstance(lvalue, StarExpr): self.process_lvalue(lvalue.expr) elif isinstance(lvalue, (ListExpr, TupleExpr)): @@ -327,7 +331,7 @@ def visit_match_stmt(self, o: MatchStmt) -> None: self.tracker.end_branch_statement() def visit_func_def(self, o: FuncDef) -> None: - self.tracker.record_definition(o.name) + self.process_definition(o.name) self.tracker.enter_scope() super().visit_func_def(o) self.tracker.exit_scope() @@ -476,6 +480,12 @@ def visit_with_stmt(self, o: WithStmt) -> None: self.process_lvalue(idx) o.body.accept(self) + def visit_class_def(self, o: ClassDef) -> None: + self.process_definition(o.name) + self.tracker.enter_scope() + super().visit_class_def(o) + self.tracker.exit_scope() + def visit_import(self, o: Import) -> None: for mod, alias in o.ids: if alias is not None: diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index 52822f98ab53..7c10306684ca 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -140,6 +140,40 @@ def f0(b: bool) -> None: fn = lambda: 2 y = fn # E: Name "fn" may be undefined +[case testUseBeforeDefClass] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +def f(x: A): # No error here. + pass +y = A() # E: Name "A" is used before definition +class A: pass + +[case testClassScope] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +class C: + x = 0 + def f0(self) -> None: pass + + def f2(self) -> None: + f0() # No error. + self.f0() # No error. 
+ +f0() # E: Name "f0" is used before definition +def f0() -> None: pass +y = x # E: Name "x" is used before definition +x = 1 + +[case testClassInsideFunction] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +def f() -> None: + class C: pass + +c = C() # E: Name "C" is used before definition +class C: pass + +[case testUseBeforeDefFunc] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +foo() # E: Name "foo" is used before definition +def foo(): pass [case testGenerator] # flags: --enable-error-code partially-defined if int(): From 3a3cf412b278ee7bf710742b168beed41c1c02f2 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Tue, 29 Nov 2022 16:43:54 +0300 Subject: [PATCH 092/292] [stubtest] Add `__warningregistry__` to the list of ignored module dunders (#14218) We have multiple of these in `typeshed`: https://github.com/python/typeshed/search?q=__warningregistry__ --- mypy/stubtest.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 8add8178823a..5e7f9cf331b2 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1243,6 +1243,8 @@ def verify_typealias( "__annotations__", "__path__", # mypy adds __path__ to packages, but C packages don't have it "__getattr__", # resulting behaviour might be typed explicitly + # Created by `warnings.warn`, does not make much sense to have in stubs: + "__warningregistry__", # TODO: remove the following from this list "__author__", "__version__", From 98f1b00a3b41b3734f7a20adbd755c13e37beb43 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Tue, 29 Nov 2022 16:07:17 -0600 Subject: [PATCH 093/292] [partially defined] fix false positive with global/nonlocal vars (#14222) This is a workaround until we implement better handling for variables undefined in global scope (see #14213). We treat `global/nonlocal` as a variable declaration. I've included test cases that should fail in the future once we implement the check properly. 
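A sketch of the pattern that previously produced a false positive, adapted from the new tests (nonlocal is handled the same way):

    def f0() -> None:
        global x   # now treated as a declaration of x
        y = x
        x = 1      # No error.

    x = 2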
--- mypy/partially_defined.py | 12 ++++++ test-data/unit/check-partially-defined.test | 43 +++++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index c2c925e0477c..1a38501e9f67 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -17,6 +17,7 @@ FuncDef, FuncItem, GeneratorExpr, + GlobalDecl, IfStmt, Import, ImportFrom, @@ -25,6 +26,7 @@ Lvalue, MatchStmt, NameExpr, + NonlocalDecl, RaiseStmt, RefExpr, ReturnStmt, @@ -279,6 +281,16 @@ def process_definition(self, name: str) -> None: self.var_used_before_def(name, ref) self.tracker.record_definition(name) + def visit_global_decl(self, o: GlobalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_global_decl(o) + + def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_nonlocal_decl(o) + def process_lvalue(self, lvalue: Lvalue | None) -> None: if isinstance(lvalue, NameExpr): self.process_definition(lvalue.name) diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index 7c10306684ca..3359702b60bc 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -191,6 +191,49 @@ if int(): y = 3 x = y # E: Name "y" may be undefined +[case testVarDefinedInOuterScopeUpdated] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +def f0() -> None: + global x + y = x + x = 1 # No error. + +x = 2 + +[case testNonlocalVar] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +def f0() -> None: + x = 2 + + def inner() -> None: + nonlocal x + y = x + x = 1 # No error. + + +[case testGlobalDeclarationAfterUsage] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +def f0() -> None: + y = x # E: Name "x" is used before definition + global x + x = 1 # No error. + +x = 2 +[case testVarDefinedInOuterScope] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +def f0() -> None: + global x + y = x # We do not detect such errors right now. + +f0() +x = 1 +[case testDefinedInOuterScopeNoError] +# flags: --enable-error-code partially-defined --enable-error-code use-before-def +def foo() -> None: + bar() + +def bar() -> None: + foo() [case testFuncParams] # flags: --enable-error-code partially-defined def foo(a: int) -> None: From d3427c1ad5064afb6bddee459a24a91fdc036b76 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Wed, 30 Nov 2022 06:44:08 -0600 Subject: [PATCH 094/292] [partially defined] fix a false-negative with variable defined in a skipped branch (#14221) The goal of partially-defined check is to detect variables that could be undefined but semantic analyzer doesn't detect them as undefined. In this case, a variable was defined in a branch that returns, so semantic analyzer considered it defined when it was not. I've discovered this when testing support for try/except statements (#14114). 
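The newly detected case, taken from the test added below (with --enable-error-code partially-defined):

    def f6() -> int:
        if int():
            x = 0
            return x
        return x  # E: Name "x" may be undefined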
--- mypy/partially_defined.py | 32 ++++++++++++--------- test-data/unit/check-partially-defined.test | 6 ++++ 2 files changed, 25 insertions(+), 13 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 1a38501e9f67..3ec8db3665cd 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -121,23 +121,29 @@ def is_defined_in_a_branch(self, name: str) -> bool: return False def done(self) -> BranchState: - branches = [b for b in self.branches if not b.skipped] - if len(branches) == 0: - return BranchState(skipped=True) - if len(branches) == 1: - return branches[0] - - # must_be_defined is a union of must_be_defined of all branches. - must_be_defined = set(branches[0].must_be_defined) - for b in branches[1:]: - must_be_defined.intersection_update(b.must_be_defined) - # may_be_defined are all variables that are not must be defined. + # First, compute all vars, including skipped branches. We include skipped branches + # because our goal is to capture all variables that semantic analyzer would + # consider defined. all_vars = set() - for b in branches: + for b in self.branches: all_vars.update(b.may_be_defined) all_vars.update(b.must_be_defined) + # For the rest of the things, we only care about branches that weren't skipped. + non_skipped_branches = [b for b in self.branches if not b.skipped] + if len(non_skipped_branches) > 0: + must_be_defined = non_skipped_branches[0].must_be_defined + for b in non_skipped_branches[1:]: + must_be_defined.intersection_update(b.must_be_defined) + else: + must_be_defined = set() + # Everything that wasn't defined in all branches but was defined + # in at least one branch should be in `may_be_defined`! may_be_defined = all_vars.difference(must_be_defined) - return BranchState(may_be_defined=may_be_defined, must_be_defined=must_be_defined) + return BranchState( + must_be_defined=must_be_defined, + may_be_defined=may_be_defined, + skipped=len(non_skipped_branches) == 0, + ) class Scope: diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test index 3359702b60bc..623e897e865d 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-partially-defined.test @@ -345,6 +345,12 @@ def f5() -> int: return 3 return 1 +def f6() -> int: + if int(): + x = 0 + return x + return x # E: Name "x" may be undefined + [case testDefinedDifferentBranchUseBeforeDef] # flags: --enable-error-code partially-defined --enable-error-code use-before-def From 6e9227a8cedf268799d2f0442ec855db46414778 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 30 Nov 2022 19:43:29 -0800 Subject: [PATCH 095/292] Sync typeshed (#14228) Sync typeshed Source commit: https://github.com/python/typeshed/commit/87d2683ef974ef6d6d8b38313f73e9ef0acad06c --- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 26 ++++++++- mypy/typeshed/stdlib/abc.pyi | 4 +- mypy/typeshed/stdlib/asyncio/base_events.pyi | 28 +++++----- mypy/typeshed/stdlib/asyncio/events.pyi | 32 +++++------ mypy/typeshed/stdlib/builtins.pyi | 53 ++++++++++++------- mypy/typeshed/stdlib/marshal.pyi | 30 +++++++++-- .../stdlib/multiprocessing/reduction.pyi | 37 ++++++++----- mypy/typeshed/stdlib/ssl.pyi | 4 ++ mypy/typeshed/stdlib/string.pyi | 11 +++- mypy/typeshed/stdlib/unicodedata.pyi | 9 ++-- mypy/typeshed/stdlib/unittest/mock.pyi | 2 +- mypy/typeshed/stdlib/urllib/parse.pyi | 19 +++++-- mypy/typeshed/stdlib/urllib/request.pyi | 48 +++++++++-------- 
mypy/typeshed/stdlib/xmlrpc/client.pyi | 45 +++++++++++----- mypy/typeshed/stdlib/xmlrpc/server.pyi | 6 +-- 15 files changed, 234 insertions(+), 120 deletions(-) diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index 3ae2fca1d19d..f01db74caf40 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -236,6 +236,29 @@ else: ReadableBuffer: TypeAlias = ReadOnlyBuffer | WriteableBuffer # stable _BufferWithLen: TypeAlias = ReadableBuffer # not stable # noqa: Y047 +# Anything that implements the read-write buffer interface, and can be sliced/indexed. +SliceableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +IndexableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +# https://github.com/python/typeshed/pull/9115#issuecomment-1304905864 +# Post PEP 688, they should be rewritten as such: +# from collections.abc import Sequence +# from typing import Sized, overload +# class SliceableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# class IndexableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __getitem__(self, __i: int) -> int: ... +# class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __contains__(self, __x: Any) -> bool: ... +# @overload +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# @overload +# def __getitem__(self, __i: int) -> int: ... +# class SizedBuffer(Sized, Protocol): # instead of _BufferWithLen +# def __buffer__(self, __flags: int) -> memoryview: ... + ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] OptExcInfo: TypeAlias = Union[ExcInfo, tuple[None, None, None]] @@ -276,5 +299,4 @@ StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] # Objects suitable to be passed to sys.settrace, threading.settrace, and similar -# TODO: Ideally this would be a recursive type alias -TraceFunction: TypeAlias = Callable[[FrameType, str, Any], Callable[[FrameType, str, Any], Any] | None] +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index 110eba24a9ca..7b39c88ed394 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -18,8 +18,8 @@ class ABCMeta(type): else: def __new__(mcls: type[Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any) -> Self: ... - def __instancecheck__(cls: ABCMeta, instance: Any) -> Any: ... - def __subclasscheck__(cls: ABCMeta, subclass: Any) -> Any: ... + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = ...) -> None: ... def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index c1ab114b6036..83576ab6455e 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -5,7 +5,7 @@ from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandl from asyncio.futures import Future from asyncio.protocols import BaseProtocol from asyncio.tasks import Task -from asyncio.transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport +from asyncio.transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from collections.abc import Awaitable, Callable, Coroutine, Generator, Iterable, Sequence from contextvars import Context from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket @@ -129,7 +129,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -148,7 +148,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 8): @overload async def create_connection( @@ -167,7 +167,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -185,7 +185,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: @overload async def create_connection( @@ -202,7 +202,7 @@ class BaseEventLoop(AbstractEventLoop): local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -218,7 +218,7 @@ class BaseEventLoop(AbstractEventLoop): local_addr: None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): @overload async def create_server( @@ -266,7 +266,7 @@ class BaseEventLoop(AbstractEventLoop): server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> BaseTransport: ... + ) -> Transport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -275,7 +275,7 @@ class BaseEventLoop(AbstractEventLoop): ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: @overload async def create_server( @@ -320,7 +320,7 @@ class BaseEventLoop(AbstractEventLoop): server_side: bool = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> BaseTransport: ... 
+ ) -> Transport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -328,13 +328,13 @@ class BaseEventLoop(AbstractEventLoop): *, ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... async def sock_sendfile( self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... ) -> int: ... async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... ) -> int: ... if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] @@ -349,7 +349,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = ..., allow_broadcast: bool | None = ..., sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: ... else: async def create_datagram_endpoint( self, @@ -364,7 +364,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = ..., allow_broadcast: bool | None = ..., sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index 280be4ab5ba9..7241d5a29f8d 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -12,7 +12,7 @@ from .base_events import Server from .futures import Future from .protocols import BaseProtocol from .tasks import Task -from .transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport +from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from .unix_events import AbstractChildWatcher if sys.version_info >= (3, 8): @@ -223,7 +223,7 @@ class AbstractEventLoop: ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( @@ -243,7 +243,7 @@ class AbstractEventLoop: ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 8): @overload @abstractmethod @@ -263,7 +263,7 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( @@ -282,7 +282,7 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: @overload @abstractmethod @@ -300,7 +300,7 @@ class AbstractEventLoop: local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... 
+ ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( @@ -317,7 +317,7 @@ class AbstractEventLoop: local_addr: None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): @overload @abstractmethod @@ -360,7 +360,7 @@ class AbstractEventLoop: @abstractmethod async def start_tls( self, - transport: BaseTransport, + transport: WriteTransport, protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, @@ -368,7 +368,7 @@ class AbstractEventLoop: server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> BaseTransport: ... + ) -> Transport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -428,7 +428,7 @@ class AbstractEventLoop: server_side: bool = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> BaseTransport: ... + ) -> Transport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -449,7 +449,7 @@ class AbstractEventLoop: ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 10): async def connect_accepted_socket( self, @@ -458,7 +458,7 @@ class AbstractEventLoop: *, ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): async def create_unix_connection( self, @@ -470,7 +470,7 @@ class AbstractEventLoop: server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: async def create_unix_connection( self, @@ -481,7 +481,7 @@ class AbstractEventLoop: sock: socket | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @abstractmethod async def sock_sendfile( @@ -489,7 +489,7 @@ class AbstractEventLoop: ) -> int: ... @abstractmethod async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... ) -> int: ... @abstractmethod async def create_datagram_endpoint( @@ -505,7 +505,7 @@ class AbstractEventLoop: reuse_port: bool | None = ..., allow_broadcast: bool | None = ..., sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. @abstractmethod async def connect_read_pipe( diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 5482955eb0ab..a47d774467dd 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1232,19 +1232,13 @@ def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: @overload def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... 
-# We need recursive types to express the type of the second argument to `isinstance` properly, hence the use of `Any` if sys.version_info >= (3, 10): - def isinstance( - __obj: object, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - def issubclass( - __cls: type, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - + _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] else: - def isinstance(__obj: object, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... - def issubclass(__cls: type, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] +def isinstance(__obj: object, __class_or_tuple: _ClassInfo) -> bool: ... +def issubclass(__cls: type, __class_or_tuple: _ClassInfo) -> bool: ... def len(__obj: Sized) -> int: ... def license() -> None: ... def locals() -> dict[str, Any]: ... @@ -1852,6 +1846,7 @@ if sys.version_info >= (3, 11): _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True) _ExceptionT = TypeVar("_ExceptionT", bound=Exception) + # See `check_exception_group.py` for use-cases and comments. class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... @property @@ -1859,18 +1854,34 @@ if sys.version_info >= (3, 11): @property def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... @overload + def subgroup( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload def subgroup( self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] ) -> BaseExceptionGroup[_BaseExceptionT] | None: ... @overload - def subgroup(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> Self | None: ... + def subgroup( + self: Self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: ... + @overload + def split( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + @overload + def split( + self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... @overload def split( - self: Self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] - ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, Self | None]: ... + self: Self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT_co] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + # In reality it is `NonEmptySequence`: @overload - def split(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... - def derive(self: Self, __excs: Sequence[_BaseExceptionT_co]) -> Self: ... + def derive(self, __excs: Sequence[_ExceptionT]) -> ExceptionGroup[_ExceptionT]: ... + @overload + def derive(self, __excs: Sequence[_BaseExceptionT]) -> BaseExceptionGroup[_BaseExceptionT]: ... def __class_getitem__(cls, __item: Any) -> GenericAlias: ... 
class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): @@ -1883,10 +1894,14 @@ if sys.version_info >= (3, 11): self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] ) -> ExceptionGroup[_ExceptionT] | None: ... @overload - def subgroup(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> Self | None: ... + def subgroup( + self: Self, __condition: Callable[[_ExceptionT_co | Self], bool] + ) -> ExceptionGroup[_ExceptionT_co] | None: ... @overload # type: ignore[override] def split( - self: Self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] - ) -> tuple[ExceptionGroup[_ExceptionT] | None, Self | None]: ... + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None]: ... @overload - def split(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... + def split( + self: Self, __condition: Callable[[_ExceptionT_co | Self], bool] + ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... diff --git a/mypy/typeshed/stdlib/marshal.pyi b/mypy/typeshed/stdlib/marshal.pyi index d68cdd143109..d46d9c10483d 100644 --- a/mypy/typeshed/stdlib/marshal.pyi +++ b/mypy/typeshed/stdlib/marshal.pyi @@ -1,9 +1,33 @@ +import builtins +import types from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite -from typing import Any +from typing import Any, Union +from typing_extensions import TypeAlias version: int -def dump(__value: Any, __file: SupportsWrite[bytes], __version: int = ...) -> None: ... +_Marshallable: TypeAlias = Union[ + # handled in w_object() in marshal.c + None, + type[StopIteration], + builtins.ellipsis, + bool, + # handled in w_complex_object() in marshal.c + int, + float, + complex, + bytes, + str, + tuple[_Marshallable, ...], + list[Any], + dict[Any, Any], + set[Any], + frozenset[_Marshallable], + types.CodeType, + ReadableBuffer, +] + +def dump(__value: _Marshallable, __file: SupportsWrite[bytes], __version: int = ...) -> None: ... def load(__file: SupportsRead[bytes]) -> Any: ... -def dumps(__value: Any, __version: int = ...) -> bytes: ... +def dumps(__value: _Marshallable, __version: int = ...) -> bytes: ... def loads(__bytes: ReadableBuffer) -> Any: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi index cab86d866bab..d6b70aefa48d 100644 --- a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi @@ -1,9 +1,14 @@ import pickle import sys -from _typeshed import HasFileno, Incomplete +from _typeshed import HasFileno, SupportsWrite from abc import ABCMeta +from builtins import type as Type # alias to avoid name clash +from collections.abc import Callable from copyreg import _DispatchTableType +from multiprocessing import connection +from pickle import _ReducedType from socket import socket +from typing import Any from typing_extensions import Literal if sys.platform == "win32": @@ -11,18 +16,20 @@ if sys.platform == "win32": else: __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupFd", "sendfds", "recvfds"] +HAVE_SEND_HANDLE: bool + class ForkingPickler(pickle.Pickler): dispatch_table: _DispatchTableType - def __init__(self, *args) -> None: ... + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... @classmethod - def register(cls, type, reduce) -> None: ... 
+ def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... @classmethod - def dumps(cls, obj, protocol: Incomplete | None = ...): ... + def dumps(cls, obj: Any, protocol: int | None = ...) -> memoryview: ... loads = pickle.loads register = ForkingPickler.register -def dump(obj, file, protocol: Incomplete | None = ...) -> None: ... +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... if sys.platform == "win32": if sys.version_info >= (3, 8): @@ -32,13 +39,13 @@ if sys.platform == "win32": else: def duplicate(handle: int, target_process: int | None = ..., inheritable: bool = ...) -> int: ... - def steal_handle(source_pid, handle): ... - def send_handle(conn, handle, destination_pid) -> None: ... - def recv_handle(conn): ... + def steal_handle(source_pid: int, handle: int) -> int: ... + def send_handle(conn: connection.PipeConnection, handle: int, destination_pid: int) -> None: ... + def recv_handle(conn: connection.PipeConnection) -> int: ... class DupHandle: - def __init__(self, handle, access, pid: Incomplete | None = ...) -> None: ... - def detach(self): ... + def __init__(self, handle: int, access: int, pid: int | None = ...) -> None: ... + def detach(self) -> int: ... else: if sys.platform == "darwin": @@ -47,10 +54,11 @@ else: ACKNOWLEDGE: Literal[False] def recvfds(sock: socket, size: int) -> list[int]: ... - def send_handle(conn, handle, destination_pid) -> None: ... + # destination_pid is unused + def send_handle(conn: HasFileno, handle: int, destination_pid: object) -> None: ... def recv_handle(conn: HasFileno) -> int: ... - def sendfds(sock, fds) -> None: ... - def DupFd(fd): ... + def sendfds(sock: socket, fds: list[int]) -> None: ... + def DupFd(fd: int) -> Any: ... # Return type is really hard to get right # These aliases are to work around pyright complaints. # Pyright doesn't like it when a class object is defined as an alias @@ -84,4 +92,5 @@ class AbstractReducer(metaclass=ABCMeta): sendfds = _sendfds recvfds = _recvfds DupFd = _DupFd - def __init__(self, *args) -> None: ... + # *args are unused + def __init__(self, *args: object) -> None: ... diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 543433f2fbd0..91844e8369df 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -180,6 +180,8 @@ class Options(enum.IntFlag): OP_NO_RENEGOTIATION: int if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: int + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: int OP_ALL: Options OP_NO_SSLv2: Options @@ -196,6 +198,8 @@ OP_NO_TICKET: Options OP_NO_RENEGOTIATION: Options if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: Options + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: Options HAS_NEVER_CHECK_COMMON_NAME: bool HAS_SSLv2: bool diff --git a/mypy/typeshed/stdlib/string.pyi b/mypy/typeshed/stdlib/string.pyi index 6fb803fe53be..49802ce81019 100644 --- a/mypy/typeshed/stdlib/string.pyi +++ b/mypy/typeshed/stdlib/string.pyi @@ -64,11 +64,20 @@ class Formatter: ) -> LiteralString: ... @overload def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... + def _vformat( # undocumented + self, + format_string: str, + args: Sequence[Any], + kwargs: Mapping[str, Any], + used_args: set[int | str], + recursion_depth: int, + auto_arg_index: int = ..., + ) -> tuple[str, int]: ... 
def parse( self, format_string: StrOrLiteralStr ) -> Iterable[tuple[StrOrLiteralStr, StrOrLiteralStr | None, StrOrLiteralStr | None, StrOrLiteralStr | None]]: ... def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: int | str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... - def check_unused_args(self, used_args: Sequence[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... + def check_unused_args(self, used_args: set[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... def format_field(self, value: Any, format_spec: str) -> Any: ... def convert_field(self, value: Any, conversion: str) -> Any: ... diff --git a/mypy/typeshed/stdlib/unicodedata.pyi b/mypy/typeshed/stdlib/unicodedata.pyi index 5a9aa0a3395f..4569d6584fd6 100644 --- a/mypy/typeshed/stdlib/unicodedata.pyi +++ b/mypy/typeshed/stdlib/unicodedata.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import ReadOnlyBuffer from typing import Any, TypeVar -from typing_extensions import final +from typing_extensions import Literal, TypeAlias, final ucd_3_2_0: UCD unidata_version: str @@ -17,7 +17,10 @@ def combining(__chr: str) -> int: ... def decimal(__chr: str, __default: _T = ...) -> int | _T: ... def decomposition(__chr: str) -> str: ... def digit(__chr: str, __default: _T = ...) -> int | _T: ... -def east_asian_width(__chr: str) -> str: ... + +_EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] + +def east_asian_width(__chr: str) -> _EastAsianWidth: ... if sys.version_info >= (3, 8): def is_normalized(__form: str, __unistr: str) -> bool: ... @@ -38,7 +41,7 @@ class UCD: def decimal(self, __chr: str, __default: _T = ...) -> int | _T: ... def decomposition(self, __chr: str) -> str: ... def digit(self, __chr: str, __default: _T = ...) -> int | _T: ... - def east_asian_width(self, __chr: str) -> str: ... + def east_asian_width(self, __chr: str) -> _EastAsianWidth: ... if sys.version_info >= (3, 8): def is_normalized(self, __form: str, __unistr: str) -> bool: ... diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 6c58f38a0d82..e4cedef1b425 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -82,7 +82,7 @@ class _Call(tuple[Any, ...]): def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... - def __getattr__(self, attr: Any) -> Any: ... + def __getattr__(self, attr: str) -> Any: ... def __getattribute__(self, attr: str) -> Any: ... if sys.version_info >= (3, 8): @property diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index 02e2774b3b8e..efb91a4b34ff 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -147,7 +147,7 @@ if sys.version_info >= (3, 9): else: def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ... -def unquote_to_bytes(string: str | bytes) -> bytes: ... +def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ... @overload def urldefrag(url: str) -> DefragResult: ... @@ -168,11 +168,22 @@ def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = ...) -> An @overload def urlparse(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> ParseResult: ... 
@overload -def urlparse(url: bytes | bytearray | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> ParseResultBytes: ... +def urlparse( + url: bytes | bytearray | None, scheme: bytes | bytearray | None = ..., allow_fragments: bool = ... +) -> ParseResultBytes: ... @overload def urlsplit(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> SplitResult: ... -@overload -def urlsplit(url: bytes | bytearray | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... + +if sys.version_info >= (3, 11): + @overload + def urlsplit(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... + +else: + @overload + def urlsplit( + url: bytes | bytearray | None, scheme: bytes | bytearray | None = ..., allow_fragments: bool = ... + ) -> SplitResultBytes: ... + @overload def urlunparse( components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 3cd5fc740fca..00c160293762 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import StrOrBytesPath, SupportsRead +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from email.message import Message from http.client import HTTPConnection, HTTPMessage, HTTPResponse @@ -50,7 +50,7 @@ __all__ = [ _T = TypeVar("_T") _UrlopenRet: TypeAlias = Any -_DataType: TypeAlias = bytes | SupportsRead[bytes] | Iterable[bytes] | None +_DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | None def urlopen( url: str | Request, @@ -214,7 +214,7 @@ class AbstractDigestAuthHandler: def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ... - def get_entity_digest(self, data: bytes | None, chal: Mapping[str, str]) -> str | None: ... + def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): auth_header: ClassVar[str] # undocumented @@ -301,14 +301,14 @@ def urlcleanup() -> None: ... class URLopener: version: ClassVar[str] def __init__(self, proxies: dict[str, str] | None = ..., **x509: str) -> None: ... - def open(self, fullurl: str, data: bytes | None = ...) -> _UrlopenRet: ... - def open_unknown(self, fullurl: str, data: bytes | None = ...) -> _UrlopenRet: ... + def open(self, fullurl: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... def retrieve( self, url: str, filename: str | None = ..., reporthook: Callable[[int, int, int], object] | None = ..., - data: bytes | None = ..., + data: ReadableBuffer | None = ..., ) -> tuple[str, Message | None]: ... def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented def cleanup(self) -> None: ... # undocumented @@ -319,32 +319,32 @@ class URLopener: def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> _UrlopenRet: ... # undocumented - def open_data(self, url: str, data: bytes | None = ...) -> addinfourl: ... 
# undocumented + def open_data(self, url: str, data: ReadableBuffer | None = ...) -> addinfourl: ... # undocumented def open_file(self, url: str) -> addinfourl: ... # undocumented def open_ftp(self, url: str) -> addinfourl: ... # undocumented - def open_http(self, url: str, data: bytes | None = ...) -> _UrlopenRet: ... # undocumented - def open_https(self, url: str, data: bytes | None = ...) -> _UrlopenRet: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... # undocumented def open_local_file(self, url: str) -> addinfourl: ... # undocumented - def open_unknown_proxy(self, proxy: str, fullurl: str, data: bytes | None = ...) -> None: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = ...) -> None: ... # undocumented class FancyURLopener(URLopener): def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... def get_user_passwd(self, host: str, realm: str, clear_cache: int = ...) -> tuple[str, str]: ... # undocumented def http_error_301( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_302( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_303( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_307( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented if sys.version_info >= (3, 11): def http_error_308( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_401( @@ -354,7 +354,7 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: bytes | None = ..., + data: ReadableBuffer | None = ..., retry: bool = ..., ) -> _UrlopenRet | None: ... # undocumented def http_error_407( @@ -364,20 +364,24 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: bytes | None = ..., + data: ReadableBuffer | None = ..., retry: bool = ..., ) -> _UrlopenRet | None: ... # undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> addinfourl: ... 
# undocumented def redirect_internal( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None + ) -> _UrlopenRet | None: ... # undocumented + def retry_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = ... + ) -> _UrlopenRet | None: ... # undocumented + def retry_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = ... ) -> _UrlopenRet | None: ... # undocumented - def retry_http_basic_auth(self, url: str, realm: str, data: bytes | None = ...) -> _UrlopenRet | None: ... # undocumented - def retry_https_basic_auth(self, url: str, realm: str, data: bytes | None = ...) -> _UrlopenRet | None: ... # undocumented def retry_proxy_http_basic_auth( - self, url: str, realm: str, data: bytes | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = ... ) -> _UrlopenRet | None: ... # undocumented def retry_proxy_https_basic_auth( - self, url: str, realm: str, data: bytes | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = ... ) -> _UrlopenRet | None: ... # undocumented diff --git a/mypy/typeshed/stdlib/xmlrpc/client.pyi b/mypy/typeshed/stdlib/xmlrpc/client.pyi index 150291009f54..0e048f57844d 100644 --- a/mypy/typeshed/stdlib/xmlrpc/client.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/client.pyi @@ -2,7 +2,7 @@ import gzip import http.client import sys import time -from _typeshed import Self, SupportsRead, SupportsWrite +from _typeshed import ReadableBuffer, Self, SupportsRead, SupportsWrite, _BufferWithLen from collections.abc import Callable, Iterable, Mapping from datetime import datetime from io import BytesIO @@ -15,7 +15,20 @@ class _SupportsTimeTuple(Protocol): _DateTimeComparable: TypeAlias = DateTime | datetime | str | _SupportsTimeTuple _Marshallable: TypeAlias = ( - bool | int | float | str | bytes | None | tuple[Any, ...] | list[Any] | dict[Any, Any] | datetime | DateTime | Binary + bool + | int + | float + | str + | bytes + | bytearray + | None + | tuple[_Marshallable, ...] + # Ideally we'd use _Marshallable for list and dict, but invariance makes that impractical + | list[Any] + | dict[str, Any] + | datetime + | DateTime + | Binary ) _XMLDate: TypeAlias = int | datetime | tuple[int, ...] | time.struct_time _HostType: TypeAlias = Union[tuple[str, dict[str, str]], str] @@ -83,18 +96,18 @@ def _datetime_type(data: str) -> datetime: ... # undocumented class Binary: data: bytes - def __init__(self, data: bytes | None = ...) -> None: ... - def decode(self, data: bytes) -> None: ... + def __init__(self, data: bytes | bytearray | None = ...) -> None: ... + def decode(self, data: ReadableBuffer) -> None: ... def encode(self, out: SupportsWrite[str]) -> None: ... def __eq__(self, other: object) -> bool: ... -def _binary(data: bytes) -> Binary: ... # undocumented +def _binary(data: ReadableBuffer) -> Binary: ... # undocumented WRAPPERS: tuple[type[DateTime], type[Binary]] # undocumented class ExpatParser: # undocumented def __init__(self, target: Unmarshaller) -> None: ... - def feed(self, data: str | bytes) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... _WriteCallback: TypeAlias = Callable[[str], object] @@ -115,7 +128,7 @@ class Marshaller: def dump_int(self, value: int, write: _WriteCallback) -> None: ... def dump_double(self, value: float, write: _WriteCallback) -> None: ... 
def dump_unicode(self, value: str, write: _WriteCallback, escape: Callable[[str], str] = ...) -> None: ... - def dump_bytes(self, value: bytes, write: _WriteCallback) -> None: ... + def dump_bytes(self, value: ReadableBuffer, write: _WriteCallback) -> None: ... def dump_array(self, value: Iterable[_Marshallable], write: _WriteCallback) -> None: ... def dump_struct( self, value: Mapping[str, _Marshallable], write: _WriteCallback, escape: Callable[[str], str] = ... @@ -196,13 +209,13 @@ def dumps( allow_none: bool = ..., ) -> str: ... def loads(data: str, use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[tuple[_Marshallable, ...], str | None]: ... -def gzip_encode(data: bytes) -> bytes: ... # undocumented -def gzip_decode(data: bytes, max_decode: int = ...) -> bytes: ... # undocumented +def gzip_encode(data: ReadableBuffer) -> bytes: ... # undocumented +def gzip_decode(data: ReadableBuffer, max_decode: int = ...) -> bytes: ... # undocumented class GzipDecodedResponse(gzip.GzipFile): # undocumented io: BytesIO - def __init__(self, response: SupportsRead[bytes]) -> None: ... + def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... class _Method: # undocumented @@ -231,17 +244,21 @@ class Transport: else: def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... - def request(self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ...) -> tuple[_Marshallable, ...]: ... + def request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = ... + ) -> tuple[_Marshallable, ...]: ... def single_request( - self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ... + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = ... ) -> tuple[_Marshallable, ...]: ... def getparser(self) -> tuple[ExpatParser, Unmarshaller]: ... def get_host_info(self, host: _HostType) -> tuple[str, list[tuple[str, str]], dict[str, str]]: ... def make_connection(self, host: _HostType) -> http.client.HTTPConnection: ... def close(self) -> None: ... - def send_request(self, host: _HostType, handler: str, request_body: bytes, debug: bool) -> http.client.HTTPConnection: ... + def send_request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, debug: bool + ) -> http.client.HTTPConnection: ... def send_headers(self, connection: http.client.HTTPConnection, headers: list[tuple[str, str]]) -> None: ... - def send_content(self, connection: http.client.HTTPConnection, request_body: bytes) -> None: ... + def send_content(self, connection: http.client.HTTPConnection, request_body: _BufferWithLen) -> None: ... def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... class SafeTransport(Transport): diff --git a/mypy/typeshed/stdlib/xmlrpc/server.pyi b/mypy/typeshed/stdlib/xmlrpc/server.pyi index c11d8d8e7a14..4d28974cbbed 100644 --- a/mypy/typeshed/stdlib/xmlrpc/server.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/server.pyi @@ -2,14 +2,10 @@ import http.server import pydoc import socketserver from collections.abc import Callable, Iterable, Mapping -from datetime import datetime from re import Pattern from typing import Any, ClassVar, Protocol from typing_extensions import TypeAlias -from xmlrpc.client import Fault - -# TODO: Recursive type on tuple, list, dict -_Marshallable: TypeAlias = None | bool | int | float | str | bytes | tuple[Any, ...] 
| list[Any] | dict[Any, Any] | datetime +from xmlrpc.client import Fault, _Marshallable # The dispatch accepts anywhere from 0 to N arguments, no easy way to allow this in mypy class _DispatchArity0(Protocol): From 3c71548c0bc7955c462211985859c273bd9224bc Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 1 Dec 2022 02:14:28 -0800 Subject: [PATCH 096/292] Fix false negatives involving Unions and generators or coroutines (#14224) Fixes #14223 --- mypy/checker.py | 12 ++++++++++++ test-data/unit/check-async-await.test | 18 ++++++++++++++++++ test-data/unit/check-statements.test | 9 +++++++++ 3 files changed, 39 insertions(+) diff --git a/mypy/checker.py b/mypy/checker.py index 80f7e19c65f0..1c8956ae6722 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -848,6 +848,10 @@ def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Typ if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_yield_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type( return_type, is_coroutine ) and not self.is_async_generator_return_type(return_type): @@ -878,6 +882,10 @@ def get_generator_receive_type(self, return_type: Type, is_coroutine: bool) -> T if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_receive_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type( return_type, is_coroutine ) and not self.is_async_generator_return_type(return_type): @@ -917,6 +925,10 @@ def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Ty if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_return_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type(return_type, is_coroutine): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 195e70cf5880..d53cba2fc642 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -925,3 +925,21 @@ async def f() -> AsyncGenerator[int, None]: [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case testAwaitUnion] +from typing import overload, Union + +class A: ... +class B: ... + +@overload +async def foo(x: A) -> B: ... +@overload +async def foo(x: B) -> A: ... +async def foo(x): ... 
+ +async def bar(x: Union[A, B]) -> None: + reveal_type(await foo(x)) # N: Revealed type is "Union[__main__.B, __main__.A]" + +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 4be5060996e2..3450f8593d27 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -2206,3 +2206,12 @@ def foo(): x: int = "no" # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs y = "no" # type: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs z: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs + +[case testGeneratorUnion] +from typing import Generator, Union + +class A: pass +class B: pass + +def foo(x: int) -> Union[Generator[A, None, None], Generator[B, None, None]]: + yield x # E: Incompatible types in "yield" (actual type "int", expected type "Union[A, B]") From 8da17d71e156140e67057703a46805637ab869cf Mon Sep 17 00:00:00 2001 From: jhance Date: Thu, 1 Dec 2022 09:35:41 -0800 Subject: [PATCH 097/292] Give TypeVarTupleType a fallback (#14231) We want to eventually decomission the use of TypeList for constraints/expanded values of TypeVarTupleTypes. In order to do that we will need a reference to the tuple fallback. All the places we currently construct these TypeLists for the use in variadic generics have access to a TypeVarTupleType, so putting the tuple fallback into the typevar type is a reasonable way to make that information accessible where we need it. In order to get it into the type, we first put it into the expr by making a symbol table lookup in semanal. 
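
For illustration, a minimal sketch (not part of this change) of how the new constructor is called once the fallback is threaded through; `object_type` and `tuple_type` stand in for whatever Instances the caller already has at hand (e.g. the fixtures in mypy/test/typefixture.py):

```python
# Sketch only: constructing a TypeVarTupleType with the new required
# tuple_fallback argument, mirroring the signature added in mypy/types.py.
from mypy.types import Instance, TypeVarTupleType

def make_ts(object_type: Instance, tuple_type: Instance) -> TypeVarTupleType:
    # tuple_type is assumed to be an Instance of builtins.tuple, obtained the
    # same way semanal.py does above via named_type("builtins.tuple", ...).
    return TypeVarTupleType("Ts", "Ts", 1, object_type, tuple_type)
```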
--- mypy/copytype.py | 2 +- mypy/nodes.py | 17 ++++++++++++++++- mypy/semanal.py | 3 ++- mypy/test/typefixture.py | 25 +++++++++++++------------ mypy/treetransform.py | 6 +++++- mypy/tvar_scope.py | 1 + mypy/typeanal.py | 2 ++ mypy/types.py | 22 +++++++++++++++++++++- mypy/typevars.py | 10 +++++++++- test-data/unit/semanal-errors.test | 1 + test-data/unit/semanal-types.test | 1 + 11 files changed, 72 insertions(+), 18 deletions(-) diff --git a/mypy/copytype.py b/mypy/copytype.py index baa1ba34cbac..6024e527705b 100644 --- a/mypy/copytype.py +++ b/mypy/copytype.py @@ -94,7 +94,7 @@ def visit_parameters(self, t: Parameters) -> ProperType: return self.copy_common(t, dup) def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: - dup = TypeVarTupleType(t.name, t.fullname, t.id, t.upper_bound) + dup = TypeVarTupleType(t.name, t.fullname, t.id, t.upper_bound, t.tuple_fallback) return self.copy_common(t, dup) def visit_unpack_type(self, t: UnpackType) -> ProperType: diff --git a/mypy/nodes.py b/mypy/nodes.py index c02e21e88b44..80ab787f4a9c 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2526,10 +2526,23 @@ def deserialize(cls, data: JsonDict) -> ParamSpecExpr: class TypeVarTupleExpr(TypeVarLikeExpr): """Type variable tuple expression TypeVarTuple(...).""" - __slots__ = () + __slots__ = "tuple_fallback" + + tuple_fallback: mypy.types.Instance __match_args__ = ("name", "upper_bound") + def __init__( + self, + name: str, + fullname: str, + upper_bound: mypy.types.Type, + tuple_fallback: mypy.types.Instance, + variance: int = INVARIANT, + ) -> None: + super().__init__(name, fullname, upper_bound, variance) + self.tuple_fallback = tuple_fallback + def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_type_var_tuple_expr(self) @@ -2539,6 +2552,7 @@ def serialize(self) -> JsonDict: "name": self._name, "fullname": self._fullname, "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), "variance": self.variance, } @@ -2549,6 +2563,7 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr: data["name"], data["fullname"], mypy.types.deserialize_type(data["upper_bound"]), + mypy.types.Instance.deserialize(data["tuple_fallback"]), data["variance"], ) diff --git a/mypy/semanal.py b/mypy/semanal.py index 3e1e1a1e5d61..266dc891b697 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -4119,8 +4119,9 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: # PEP 646 does not specify the behavior of variance, constraints, or bounds. 
if not call.analyzed: + tuple_fallback = self.named_type("builtins.tuple", [self.object_type()]) typevartuple_var = TypeVarTupleExpr( - name, self.qualified_name(name), self.object_type(), INVARIANT + name, self.qualified_name(name), self.object_type(), tuple_fallback, INVARIANT ) typevartuple_var.line = call.line call.analyzed = typevartuple_var diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index bd8351171208..d12e7abab0e2 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -56,9 +56,6 @@ def make_type_var( ) -> TypeVarType: return TypeVarType(name, name, id, values, upper_bound, variance) - def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: - return TypeVarTupleType(name, name, id, upper_bound) - self.t = make_type_var("T", 1, [], self.o, variance) # T`1 (type variable) self.tf = make_type_var("T", -1, [], self.o, variance) # T`-1 (type variable) self.tf2 = make_type_var("T", -2, [], self.o, variance) # T`-2 (type variable) @@ -68,10 +65,6 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy self.sf1 = make_type_var("S", -1, [], self.o, variance) # S`-1 (type variable) self.u = make_type_var("U", 3, [], self.o, variance) # U`3 (type variable) - self.ts = make_type_var_tuple("Ts", 1, self.o) # Ts`1 (type var tuple) - self.ss = make_type_var_tuple("Ss", 2, self.o) # Ss`2 (type var tuple) - self.us = make_type_var_tuple("Us", 3, self.o) # Us`3 (type var tuple) - # Simple types self.anyt = AnyType(TypeOfAny.special_form) self.nonet = NoneType() @@ -133,10 +126,6 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy bases=[Instance(self.gi, [self.s1])], ) - self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) - self.gv2i = self.make_type_info( - "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 - ) # list[T] self.std_listi = self.make_type_info( "builtins.list", mro=[self.oi], typevars=["T"], variances=[variance] @@ -218,6 +207,18 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy self._add_bool_dunder(self.bool_type_info) self._add_bool_dunder(self.ai) + def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: + return TypeVarTupleType(name, name, id, upper_bound, self.std_tuple) + + self.ts = make_type_var_tuple("Ts", 1, self.o) # Ts`1 (type var tuple) + self.ss = make_type_var_tuple("Ss", 2, self.o) # Ss`2 (type var tuple) + self.us = make_type_var_tuple("Us", 3, self.o) # Us`3 (type var tuple) + + self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) + self.gv2i = self.make_type_info( + "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 + ) + def _add_bool_dunder(self, type_info: TypeInfo) -> None: signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function) bool_func = FuncDef("__bool__", [], Block([])) @@ -296,7 +297,7 @@ def make_type_info( v: list[TypeVarLikeType] = [] for id, n in enumerate(typevars, 1): if typevar_tuple_index is not None and id - 1 == typevar_tuple_index: - v.append(TypeVarTupleType(n, n, id, self.o)) + v.append(TypeVarTupleType(n, n, id, self.o, self.std_tuple)) else: if variances: variance = variances[id - 1] diff --git a/mypy/treetransform.py b/mypy/treetransform.py index 432baf7d73b7..535f50d5cf5e 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -653,7 +653,11 @@ def visit_paramspec_expr(self, node: ParamSpecExpr) 
-> ParamSpecExpr: def visit_type_var_tuple_expr(self, node: TypeVarTupleExpr) -> TypeVarTupleExpr: return TypeVarTupleExpr( - node.name, node.fullname, self.type(node.upper_bound), variance=node.variance + node.name, + node.fullname, + self.type(node.upper_bound), + node.tuple_fallback, + variance=node.variance, ) def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr: diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py index f926d0dfb883..db83768bf68a 100644 --- a/mypy/tvar_scope.py +++ b/mypy/tvar_scope.py @@ -115,6 +115,7 @@ def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: tvar_expr.fullname, i, upper_bound=tvar_expr.upper_bound, + tuple_fallback=tvar_expr.tuple_fallback, line=tvar_expr.line, column=tvar_expr.column, ) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index f34f6ef49f6c..468b10fc9847 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -364,12 +364,14 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.fail( f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE ) + # Change the line number return TypeVarTupleType( tvar_def.name, tvar_def.fullname, tvar_def.id, tvar_def.upper_bound, + sym.node.tuple_fallback, line=t.line, column=t.column, ) diff --git a/mypy/types.py b/mypy/types.py index 326727310a1b..b5a4f90d5ec3 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -720,6 +720,20 @@ class TypeVarTupleType(TypeVarLikeType): See PEP646 for more information. """ + def __init__( + self, + name: str, + fullname: str, + id: TypeVarId | int, + upper_bound: Type, + tuple_fallback: Instance, + *, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(name, fullname, id, upper_bound, line=line, column=column) + self.tuple_fallback = tuple_fallback + def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return { @@ -728,13 +742,18 @@ def serialize(self) -> JsonDict: "fullname": self.fullname, "id": self.id.raw_id, "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> TypeVarTupleType: assert data[".class"] == "TypeVarTupleType" return TypeVarTupleType( - data["name"], data["fullname"], data["id"], deserialize_type(data["upper_bound"]) + data["name"], + data["fullname"], + data["id"], + deserialize_type(data["upper_bound"]), + Instance.deserialize(data["tuple_fallback"]), ) def accept(self, visitor: TypeVisitor[T]) -> T: @@ -759,6 +778,7 @@ def copy_modified(self, id: Bogus[TypeVarId | int] = _dummy) -> TypeVarTupleType self.fullname, self.id if id is _dummy else id, self.upper_bound, + self.tuple_fallback, line=self.line, column=self.column, ) diff --git a/mypy/typevars.py b/mypy/typevars.py index 9c813550d5ea..69c2eed37fa4 100644 --- a/mypy/typevars.py +++ b/mypy/typevars.py @@ -39,7 +39,15 @@ def fill_typevars(typ: TypeInfo) -> Instance | TupleType: ) elif isinstance(tv, TypeVarTupleType): tv = UnpackType( - TypeVarTupleType(tv.name, tv.fullname, tv.id, tv.upper_bound, line=-1, column=-1) + TypeVarTupleType( + tv.name, + tv.fullname, + tv.id, + tv.upper_bound, + tv.tuple_fallback, + line=-1, + column=-1, + ) ) else: assert isinstance(tv, ParamSpecType) diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 2b10beacbf97..5697d473414e 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -1474,3 +1474,4 @@ y: Unpack[TVariadic] # E: TypeVarTuple "TVariadic" is unbound class 
Variadic(Generic[Unpack[TVariadic], Unpack[TVariadic2]]): # E: Can only use one type var tuple in a class def pass +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 8dc767e1abfc..77ef10a26b13 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -1560,3 +1560,4 @@ MypyFile:1( AssignmentStmt:2( NameExpr(TV* [__main__.TV]) TypeVarTupleExpr:2())) +[builtins fixtures/tuple.pyi] From ceb976f1b9bc4c3ae7cd07a5222ec076f6c7ab72 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Thu, 1 Dec 2022 23:36:00 +0300 Subject: [PATCH 098/292] [stubtest] associate exported symbol error with __all__ object_path (#14217) --- mypy/stubtest.py | 4 ++-- mypy/test/teststubtest.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 5e7f9cf331b2..5e39b996076b 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -261,10 +261,10 @@ def _verify_exported_names( if not (names_in_runtime_not_stub or names_in_stub_not_runtime): return yield Error( - object_path, + object_path + ["__all__"], ( "names exported from the stub do not correspond to the names exported at runtime. " - "This is probably due to an inaccurate `__all__` in the stub or things being missing from the stub." + "This is probably due to things being missing from the stub, or if present, an inaccurate `__all__` in the stub" ), # Pass in MISSING instead of the stub and runtime objects, as the line numbers aren't very # relevant here, and it makes for a prettier error message diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index e863f4f57568..812333e3feb4 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -1028,7 +1028,7 @@ def test_all_in_stub_not_at_runtime(self) -> Iterator[Case]: @collect_cases def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: - # We *should* emit an error with the module name itself, + # We *should* emit an error with the module name itself + __all__, # if the stub *does* define __all__, # but the stub's __all__ is inconsistent with the runtime's __all__ yield Case( @@ -1040,7 +1040,7 @@ def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: __all__ = [] foo = 'foo' """, - error="", + error="__all__", ) @collect_cases From 740b36428d8817d276e46f817ac11b18cea4d766 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 2 Dec 2022 00:06:34 +0000 Subject: [PATCH 099/292] Fix --fast-exit argument (#14229) Previously --fast-exit would actually disable the fast_exit option. Now it behaves as expected (a no-op, since fast exit is enabled by default). --- mypy/main.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mypy/main.py b/mypy/main.py index d0cb6ca4d505..8f60d13074a0 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -1008,7 +1008,10 @@ def add_invertible_flag( help="When encountering SOURCE_FILE, read and type check " "the contents of SHADOW_FILE instead.", ) - add_invertible_flag("--fast-exit", default=True, help=argparse.SUPPRESS, group=internals_group) + internals_group.add_argument("--fast-exit", action="store_true", help=argparse.SUPPRESS) + internals_group.add_argument( + "--no-fast-exit", action="store_false", dest="fast_exit", help=argparse.SUPPRESS + ) # This flag is useful for mypy tests, where function bodies may be omitted. Plugin developers # may want to use this as well in their tests. 
add_invertible_flag( From d5e96e381f72ad3fafaae8707b688b3da320587d Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Fri, 2 Dec 2022 07:42:28 -0800 Subject: [PATCH 100/292] [mypyc] Add `match` statement support (#13953) Closes https://github.com/mypyc/mypyc/issues/911 Like the title says, this PR adds support for compiling `match` statements in mypyc. Most of the work has been done, but there are some things which are still a WIP. A todo list of what has been done, and the (small) number of things that need to be worked out: - [x] Or patterns: `1 | 2 | 3` - [x] Value patterns: `123`, `x.y.z`, etc. - [x] Singleton patterns: `True`, `False`, and `None` - [x] Sequence patterns: - [x] Fixed length patterns `[1, 2, 3]` - [x] Starred patterns `[*prev, 4, 5, 6]`, `[1, 2, 3, *rest]`, etc: - [x] `[*rest]` is currently not working, but should be an easy fix - [x] Support any object which supports the [Sequence Protocol](https://docs.python.org/3/c-api/sequence.html) (need help with this) - [x] Mapping Pattern (`{"key": value}`): - [x] General support - [x] Starred patterns: `{"key": value, **rest}` - [x] Support any object which supports the [Mapping Protocol](https://docs.python.org/3/c-api/mapping.html) (need help with this) - [x] Class patterns: - [x] Basic class `isinstance()` check - [x] Positional args: `Class(1, 2, 3)` - [x] Keyword args: `Class(x=1, y=2, z=3)` - [x] Shortcut for built-in datatypes: `int(x)` -> `int() as x` - [x] Capture patterns: - [x] Wildcard pattern: `_` - [x] As pattern: `123 as num` - [x] Capture pattern: `x` Some features which I was unsure how to implement are: * Fix `*rest` and `**rest` star patterns name collisions. Basically, you cannot use `rest` (or any other name) twice in the same match statement if `rest` is a different type (ie, `dict` vs `list`). If it was defined as `object` instead of `dict`/`list` everything would be fine. Also some operations on native classes and primitive types could be optimized. 
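
For reference, a small hand-written example (not taken from the test data below) of the kind of `match` statement this makes compilable; the `Point` class and the literals are invented for this sketch:

```python
# Illustrative sketch covering several of the pattern kinds listed above:
# or/value patterns, a sequence pattern with a star, a mapping pattern with
# **rest, a class pattern with keyword args, and as/wildcard patterns.
from dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int  # @dataclass also generates __match_args__ = ("x", "y")

def describe(subject: object) -> str:
    match subject:
        case 0 | 1:
            return "small int"
        case [first, *rest]:
            return f"sequence starting with {first} plus {len(rest)} more"
        case {"name": name, **extra}:
            return f"mapping named {name!r} with {len(extra)} extra keys"
        case Point(x=0, y=y):
            return f"point on the y axis at {y}"
        case str() as s:
            return f"the string {s!r}"
        case _:
            return "something else"
```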
--- mypyc/irbuild/classdef.py | 2 +- mypyc/irbuild/match.py | 355 ++++++ mypyc/irbuild/prepare.py | 6 +- mypyc/irbuild/statement.py | 7 + mypyc/irbuild/visitor.py | 3 +- mypyc/lib-rt/CPy.h | 2 + mypyc/lib-rt/dict_ops.c | 8 + mypyc/lib-rt/list_ops.c | 8 + mypyc/options.py | 2 + mypyc/primitives/dict_ops.py | 24 +- mypyc/primitives/list_ops.py | 21 + mypyc/test-data/irbuild-match.test | 1708 ++++++++++++++++++++++++++++ mypyc/test-data/run-match.test | 283 +++++ mypyc/test/test_irbuild.py | 4 + mypyc/test/test_run.py | 3 + mypyc/test/testutil.py | 3 +- 16 files changed, 2430 insertions(+), 9 deletions(-) create mode 100644 mypyc/irbuild/match.py create mode 100644 mypyc/test-data/irbuild-match.test create mode 100644 mypyc/test-data/run-match.test diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 4502c201a2e8..34fc1fd766b0 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -629,7 +629,7 @@ def find_attr_initializers( and not isinstance(stmt.rvalue, TempNode) ): name = stmt.lvalues[0].name - if name in ("__slots__", "__match_args__"): + if name == "__slots__": continue if name == "__deletable__": diff --git a/mypyc/irbuild/match.py b/mypyc/irbuild/match.py new file mode 100644 index 000000000000..a1e671911ea5 --- /dev/null +++ b/mypyc/irbuild/match.py @@ -0,0 +1,355 @@ +from contextlib import contextmanager +from typing import Generator, List, Optional, Tuple + +from mypy.nodes import MatchStmt, NameExpr, TypeInfo +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + Pattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.traverser import TraverserVisitor +from mypy.types import Instance, TupleType, get_proper_type +from mypyc.ir.ops import BasicBlock, Value +from mypyc.ir.rtypes import object_rprimitive +from mypyc.irbuild.builder import IRBuilder +from mypyc.primitives.dict_ops import ( + dict_copy, + dict_del_item, + mapping_has_key, + supports_mapping_protocol, +) +from mypyc.primitives.generic_ops import generic_ssize_t_len_op +from mypyc.primitives.list_ops import ( + sequence_get_item, + sequence_get_slice, + supports_sequence_protocol, +) +from mypyc.primitives.misc_ops import fast_isinstance_op, slow_isinstance_op + +# From: https://peps.python.org/pep-0634/#class-patterns +MATCHABLE_BUILTINS = { + "builtins.bool", + "builtins.bytearray", + "builtins.bytes", + "builtins.dict", + "builtins.float", + "builtins.frozenset", + "builtins.int", + "builtins.list", + "builtins.set", + "builtins.str", + "builtins.tuple", +} + + +class MatchVisitor(TraverserVisitor): + builder: IRBuilder + code_block: BasicBlock + next_block: BasicBlock + final_block: BasicBlock + subject: Value + match: MatchStmt + + as_pattern: Optional[AsPattern] = None + + def __init__(self, builder: IRBuilder, match_node: MatchStmt) -> None: + self.builder = builder + + self.code_block = BasicBlock() + self.next_block = BasicBlock() + self.final_block = BasicBlock() + + self.match = match_node + self.subject = builder.accept(match_node.subject) + + def build_match_body(self, index: int) -> None: + self.builder.activate_block(self.code_block) + + guard = self.match.guards[index] + + if guard: + self.code_block = BasicBlock() + + cond = self.builder.accept(guard) + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + self.builder.activate_block(self.code_block) + + self.builder.accept(self.match.bodies[index]) + self.builder.goto(self.final_block) + + def visit_match_stmt(self, m: 
MatchStmt) -> None: + for i, pattern in enumerate(m.patterns): + self.code_block = BasicBlock() + self.next_block = BasicBlock() + + pattern.accept(self) + + self.build_match_body(i) + self.builder.activate_block(self.next_block) + + self.builder.goto_and_activate(self.final_block) + + def visit_value_pattern(self, pattern: ValuePattern) -> None: + value = self.builder.accept(pattern.expr) + + cond = self.builder.binary_op(self.subject, value, "==", pattern.expr.line) + + self.bind_as_pattern(value) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_or_pattern(self, pattern: OrPattern) -> None: + backup_block = self.next_block + self.next_block = BasicBlock() + + for p in pattern.patterns: + # Hack to ensure the as pattern is bound to each pattern in the + # "or" pattern, but not every subpattern + backup = self.as_pattern + p.accept(self) + self.as_pattern = backup + + self.builder.activate_block(self.next_block) + self.next_block = BasicBlock() + + self.next_block = backup_block + self.builder.goto(self.next_block) + + def visit_class_pattern(self, pattern: ClassPattern) -> None: + # TODO: use faster instance check for native classes (while still + # making sure to account for inheritence) + isinstance_op = ( + fast_isinstance_op + if self.builder.is_builtin_ref_expr(pattern.class_ref) + else slow_isinstance_op + ) + + cond = self.builder.call_c( + isinstance_op, [self.subject, self.builder.accept(pattern.class_ref)], pattern.line + ) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + self.bind_as_pattern(self.subject, new_block=True) + + if pattern.positionals: + if pattern.class_ref.fullname in MATCHABLE_BUILTINS: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + pattern.positionals[0].accept(self) + + return + + node = pattern.class_ref.node + assert isinstance(node, TypeInfo) + + ty = node.names.get("__match_args__") + assert ty + + match_args_type = get_proper_type(ty.type) + assert isinstance(match_args_type, TupleType) + + match_args: List[str] = [] + + for item in match_args_type.items: + proper_item = get_proper_type(item) + assert isinstance(proper_item, Instance) and proper_item.last_known_value + + match_arg = proper_item.last_known_value.value + assert isinstance(match_arg, str) + + match_args.append(match_arg) + + for i, expr in enumerate(pattern.positionals): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: use faster "get_attr" method instead when calling on native or + # builtin objects + positional = self.builder.py_get_attr(self.subject, match_args[i], expr.line) + + with self.enter_subpattern(positional): + expr.accept(self) + + for key, value in zip(pattern.keyword_keys, pattern.keyword_values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: same as above "get_attr" comment + attr = self.builder.py_get_attr(self.subject, key, value.line) + + with self.enter_subpattern(attr): + value.accept(self) + + def visit_as_pattern(self, pattern: AsPattern) -> None: + if pattern.pattern: + old_pattern = self.as_pattern + self.as_pattern = pattern + pattern.pattern.accept(self) + self.as_pattern = old_pattern + + elif pattern.name: + target = self.builder.get_assignment_target(pattern.name) + + self.builder.assign(target, self.subject, pattern.line) + + self.builder.goto(self.code_block) + + def visit_singleton_pattern(self, pattern: SingletonPattern) -> None: + if pattern.value is None: + obj = 
self.builder.none_object() + elif pattern.value is True: + obj = self.builder.true() + else: + obj = self.builder.false() + + cond = self.builder.binary_op(self.subject, obj, "is", pattern.line) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_mapping_pattern(self, pattern: MappingPattern) -> None: + is_dict = self.builder.call_c(supports_mapping_protocol, [self.subject], pattern.line) + + self.builder.add_bool_branch(is_dict, self.code_block, self.next_block) + + keys: List[Value] = [] + + for key, value in zip(pattern.keys, pattern.values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + key_value = self.builder.accept(key) + keys.append(key_value) + + exists = self.builder.call_c(mapping_has_key, [self.subject, key_value], pattern.line) + + self.builder.add_bool_branch(exists, self.code_block, self.next_block) + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + item = self.builder.gen_method_call( + self.subject, "__getitem__", [key_value], object_rprimitive, pattern.line + ) + + with self.enter_subpattern(item): + value.accept(self) + + if pattern.rest: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + rest = self.builder.call_c(dict_copy, [self.subject], pattern.rest.line) + + target = self.builder.get_assignment_target(pattern.rest) + + self.builder.assign(target, rest, pattern.rest.line) + + for i, key_name in enumerate(keys): + self.builder.call_c(dict_del_item, [rest, key_name], pattern.keys[i].line) + + self.builder.goto(self.code_block) + + def visit_sequence_pattern(self, seq_pattern: SequencePattern) -> None: + star_index, capture, patterns = prep_sequence_pattern(seq_pattern) + + is_list = self.builder.call_c(supports_sequence_protocol, [self.subject], seq_pattern.line) + + self.builder.add_bool_branch(is_list, self.code_block, self.next_block) + + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + actual_len = self.builder.call_c(generic_ssize_t_len_op, [self.subject], seq_pattern.line) + min_len = len(patterns) + + is_long_enough = self.builder.binary_op( + actual_len, + self.builder.load_int(min_len), + "==" if star_index is None else ">=", + seq_pattern.line, + ) + + self.builder.add_bool_branch(is_long_enough, self.code_block, self.next_block) + + for i, pattern in enumerate(patterns): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + if star_index is not None and i >= star_index: + current = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - i), "-", pattern.line + ) + + else: + current = self.builder.load_int(i) + + item = self.builder.call_c(sequence_get_item, [self.subject, current], pattern.line) + + with self.enter_subpattern(item): + pattern.accept(self) + + if capture and star_index is not None: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + capture_end = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - star_index), "-", capture.line + ) + + rest = self.builder.call_c( + sequence_get_slice, + [self.subject, self.builder.load_int(star_index), capture_end], + capture.line, + ) + + target = self.builder.get_assignment_target(capture) + self.builder.assign(target, rest, capture.line) + + self.builder.goto(self.code_block) + + def bind_as_pattern(self, value: Value, new_block: bool = False) -> None: + if self.as_pattern and self.as_pattern.pattern and self.as_pattern.name: + if new_block: + 
self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + target = self.builder.get_assignment_target(self.as_pattern.name) + self.builder.assign(target, value, self.as_pattern.pattern.line) + + self.as_pattern = None + + if new_block: + self.builder.goto(self.code_block) + + @contextmanager + def enter_subpattern(self, subject: Value) -> Generator[None, None, None]: + old_subject = self.subject + self.subject = subject + yield + self.subject = old_subject + + +def prep_sequence_pattern( + seq_pattern: SequencePattern, +) -> Tuple[Optional[int], Optional[NameExpr], List[Pattern]]: + star_index: Optional[int] = None + capture: Optional[NameExpr] = None + patterns: List[Pattern] = [] + + for i, pattern in enumerate(seq_pattern.patterns): + if isinstance(pattern, StarredPattern): + star_index = i + capture = pattern.capture + + else: + patterns.append(pattern) + + return star_index, capture, patterns diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index 639d1a5ea0d1..2399647374c0 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -231,11 +231,7 @@ def prepare_class_def( if isinstance(node.node, Var): assert node.node.type, "Class member %s missing type" % name - if not node.node.is_classvar and name not in ( - "__slots__", - "__deletable__", - "__match_args__", - ): + if not node.node.is_classvar and name not in ("__slots__", "__deletable__"): ir.attributes[name] = mapper.type_to_rtype(node.node.type) elif isinstance(node.node, (FuncDef, Decorator)): prepare_method_def(ir, module_name, cdef, mapper, node.node) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index a1d36c011aa1..6e465893607d 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -28,6 +28,7 @@ ImportFrom, ListExpr, Lvalue, + MatchStmt, OperatorAssignmentStmt, RaiseStmt, ReturnStmt, @@ -99,6 +100,8 @@ yield_from_except_op, ) +from .match import MatchVisitor + GenFunc = Callable[[], None] ValueGenFunc = Callable[[], Value] @@ -898,3 +901,7 @@ def transform_yield_from_expr(builder: IRBuilder, o: YieldFromExpr) -> Value: def transform_await_expr(builder: IRBuilder, o: AwaitExpr) -> Value: return emit_yield_from_or_await(builder, builder.accept(o.expr), o.line, is_await=True) + + +def transform_match_stmt(builder: IRBuilder, m: MatchStmt) -> None: + m.accept(MatchVisitor(builder, m)) diff --git a/mypyc/irbuild/visitor.py b/mypyc/irbuild/visitor.py index dc126d410409..d8725ee04dc5 100644 --- a/mypyc/irbuild/visitor.py +++ b/mypyc/irbuild/visitor.py @@ -131,6 +131,7 @@ transform_import, transform_import_all, transform_import_from, + transform_match_stmt, transform_operator_assignment_stmt, transform_raise_stmt, transform_return_stmt, @@ -242,7 +243,7 @@ def visit_nonlocal_decl(self, stmt: NonlocalDecl) -> None: pass def visit_match_stmt(self, stmt: MatchStmt) -> None: - self.bail("Match statements are not yet supported", stmt.line) + transform_match_stmt(self.builder, stmt) # Expressions diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index cffbbb3e1666..166c851d0155 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -371,6 +371,7 @@ CPyTagged CPyList_Index(PyObject *list, PyObject *obj); PyObject *CPySequence_Multiply(PyObject *seq, CPyTagged t_size); PyObject *CPySequence_RMultiply(CPyTagged t_size, PyObject *seq); PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); +int CPySequence_Check(PyObject *obj); // Dict operations @@ -402,6 +403,7 @@ PyObject *CPyDict_GetValuesIter(PyObject *dict); 
tuple_T3CIO CPyDict_NextKey(PyObject *dict_or_iter, CPyTagged offset); tuple_T3CIO CPyDict_NextValue(PyObject *dict_or_iter, CPyTagged offset); tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset); +int CPyMapping_Check(PyObject *obj); // Check that dictionary didn't change size during iteration. static inline char CPyDict_CheckSize(PyObject *dict, CPyTagged size) { diff --git a/mypyc/lib-rt/dict_ops.c b/mypyc/lib-rt/dict_ops.c index b013a8a5f0b9..ba565257fd72 100644 --- a/mypyc/lib-rt/dict_ops.c +++ b/mypyc/lib-rt/dict_ops.c @@ -5,6 +5,10 @@ #include #include "CPy.h" +#ifndef Py_TPFLAGS_MAPPING +#define Py_TPFLAGS_MAPPING (1 << 6) +#endif + // Dict subclasses like defaultdict override things in interesting // ways, so we don't want to just directly use the dict methods. Not // sure if it is actually worth doing all this stuff, but it saves @@ -436,3 +440,7 @@ tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset) { Py_INCREF(ret.f3); return ret; } + +int CPyMapping_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MAPPING; +} diff --git a/mypyc/lib-rt/list_ops.c b/mypyc/lib-rt/list_ops.c index cb72662e22ee..df87228a0d10 100644 --- a/mypyc/lib-rt/list_ops.c +++ b/mypyc/lib-rt/list_ops.c @@ -5,6 +5,10 @@ #include #include "CPy.h" +#ifndef Py_TPFLAGS_SEQUENCE +#define Py_TPFLAGS_SEQUENCE (1 << 5) +#endif + PyObject *CPyList_Build(Py_ssize_t len, ...) { Py_ssize_t i; @@ -325,3 +329,7 @@ PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { } return CPyObject_GetSlice(obj, start, end); } + +int CPySequence_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_SEQUENCE; +} diff --git a/mypyc/options.py b/mypyc/options.py index d554cbed164f..5f0cf12aeefe 100644 --- a/mypyc/options.py +++ b/mypyc/options.py @@ -13,6 +13,7 @@ def __init__( target_dir: str | None = None, include_runtime_files: bool | None = None, capi_version: tuple[int, int] | None = None, + python_version: tuple[int, int] | None = None, ) -> None: self.strip_asserts = strip_asserts self.multi_file = multi_file @@ -28,3 +29,4 @@ def __init__( # binaries are backward compatible even if no recent API # features are used. 
self.capi_version = capi_version or sys.version_info[:2] + self.python_version = python_version diff --git a/mypyc/primitives/dict_ops.py b/mypyc/primitives/dict_ops.py index d1dca5a79e63..9f477d0b7b90 100644 --- a/mypyc/primitives/dict_ops.py +++ b/mypyc/primitives/dict_ops.py @@ -63,7 +63,7 @@ ) # Generic one-argument dict constructor: dict(obj) -function_op( +dict_copy = function_op( name="builtins.dict", arg_types=[object_rprimitive], return_type=dict_rprimitive, @@ -301,3 +301,25 @@ c_function_name="PyDict_Size", error_kind=ERR_NEVER, ) + +# Delete an item from a dict +dict_del_item = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyDict_DelItem", + error_kind=ERR_NEG_INT, +) + +supports_mapping_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyMapping_Check", + error_kind=ERR_NEVER, +) + +mapping_has_key = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyMapping_HasKey", + error_kind=ERR_NEVER, +) diff --git a/mypyc/primitives/list_ops.py b/mypyc/primitives/list_ops.py index c729e264fc14..7fe3157f3a38 100644 --- a/mypyc/primitives/list_ops.py +++ b/mypyc/primitives/list_ops.py @@ -277,3 +277,24 @@ c_function_name="CPyList_GetSlice", error_kind=ERR_MAGIC, ) + +supports_sequence_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPySequence_Check", + error_kind=ERR_NEVER, +) + +sequence_get_item = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetItem", + error_kind=ERR_NEVER, +) + +sequence_get_slice = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetSlice", + error_kind=ERR_MAGIC, +) diff --git a/mypyc/test-data/irbuild-match.test b/mypyc/test-data/irbuild-match.test new file mode 100644 index 000000000000..2afe3d862f51 --- /dev/null +++ b/mypyc/test-data/irbuild-match.test @@ -0,0 +1,1708 @@ +[case testMatchValuePattern_python3_10] +def f(): + match 123: + case 123: + print("matched") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L3 +L2: +L3: + r8 = box(None, 1) + return r8 +[case testMatchOrPattern_python3_10] +def f(): + match 123: + case 123 | 456: + print("matched") +[out] +def f(): + r0, r1 :: bit + r2 :: str + r3 :: object + r4 :: str + r5 :: object + r6 :: object[1] + r7 :: object_ptr + r8, r9 :: object +L0: + r0 = 246 == 246 + if r0 goto L3 else goto L1 :: bool +L1: + r1 = 246 == 912 + if r1 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r2 = 'matched' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = [r2] + r7 = load_address r6 + r8 = _PyObject_Vectorcall(r5, r7, 1, 0) + keep_alive r2 + goto L5 +L4: +L5: + r9 = box(None, 1) + return r9 +[case testMatchOrPatternManyPatterns_python3_10] +def f(): + match 1: + case 1 | 2 | 3 | 4: + print("matched") +[out] +def f(): + r0, r1, r2, r3 :: bit + r4 :: str + r5 :: object + r6 :: str + r7 :: object + r8 :: object[1] + r9 :: 
object_ptr + r10, r11 :: object +L0: + r0 = 2 == 2 + if r0 goto L5 else goto L1 :: bool +L1: + r1 = 2 == 4 + if r1 goto L5 else goto L2 :: bool +L2: + r2 = 2 == 6 + if r2 goto L5 else goto L3 :: bool +L3: + r3 = 2 == 8 + if r3 goto L5 else goto L4 :: bool +L4: + goto L6 +L5: + r4 = 'matched' + r5 = builtins :: module + r6 = 'print' + r7 = CPyObject_GetAttr(r5, r6) + r8 = [r4] + r9 = load_address r8 + r10 = _PyObject_Vectorcall(r7, r9, 1, 0) + keep_alive r4 + goto L7 +L6: +L7: + r11 = box(None, 1) + return r11 +[case testMatchClassPattern_python3_10] +def f(): + match 123: + case int(): + print("matched") +[out] +def f(): + r0, r1 :: object + r2 :: bool + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = object 123 + r2 = CPy_TypeCheck(r1, r0) + if r2 goto L1 else goto L2 :: bool +L1: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L3 +L2: +L3: + r10 = box(None, 1) + return r10 +[case testMatchExaustivePattern_python3_10] +def f(): + match 123: + case _: + print("matched") +[out] +def f(): + r0 :: str + r1 :: object + r2 :: str + r3 :: object + r4 :: object[1] + r5 :: object_ptr + r6, r7 :: object +L0: +L1: + r0 = 'matched' + r1 = builtins :: module + r2 = 'print' + r3 = CPyObject_GetAttr(r1, r2) + r4 = [r0] + r5 = load_address r4 + r6 = _PyObject_Vectorcall(r3, r5, 1, 0) + keep_alive r0 + goto L3 +L2: +L3: + r7 = box(None, 1) + return r7 +[case testMatchMultipleBodies_python3_10] +def f(): + match 123: + case 123: + print("matched") + case 456: + print("no match") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7 :: object + r8 :: bit + r9 :: str + r10 :: object + r11 :: str + r12 :: object + r13 :: object[1] + r14 :: object_ptr + r15, r16 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L5 +L2: + r8 = 246 == 912 + if r8 goto L3 else goto L4 :: bool +L3: + r9 = 'no match' + r10 = builtins :: module + r11 = 'print' + r12 = CPyObject_GetAttr(r10, r11) + r13 = [r9] + r14 = load_address r13 + r15 = _PyObject_Vectorcall(r12, r14, 1, 0) + keep_alive r9 + goto L5 +L4: +L5: + r16 = box(None, 1) + return r16 +[case testMatchMultiBodyAndComplexOr_python3_10] +def f(): + match 123: + case 1: + print("here 1") + case 2 | 3: + print("here 2 | 3") + case 123: + print("here 123") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7 :: object + r8, r9 :: bit + r10 :: str + r11 :: object + r12 :: str + r13 :: object + r14 :: object[1] + r15 :: object_ptr + r16 :: object + r17 :: bit + r18 :: str + r19 :: object + r20 :: str + r21 :: object + r22 :: object[1] + r23 :: object_ptr + r24, r25 :: object +L0: + r0 = 246 == 2 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'here 1' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L9 +L2: + r8 = 246 == 4 + if r8 goto L5 else goto L3 :: bool +L3: + r9 = 246 == 6 + if r9 goto L5 else goto L4 :: bool +L4: + goto L6 +L5: + r10 = 'here 2 | 3' + 
r11 = builtins :: module + r12 = 'print' + r13 = CPyObject_GetAttr(r11, r12) + r14 = [r10] + r15 = load_address r14 + r16 = _PyObject_Vectorcall(r13, r15, 1, 0) + keep_alive r10 + goto L9 +L6: + r17 = 246 == 246 + if r17 goto L7 else goto L8 :: bool +L7: + r18 = 'here 123' + r19 = builtins :: module + r20 = 'print' + r21 = CPyObject_GetAttr(r19, r20) + r22 = [r18] + r23 = load_address r22 + r24 = _PyObject_Vectorcall(r21, r23, 1, 0) + keep_alive r18 + goto L9 +L8: +L9: + r25 = box(None, 1) + return r25 +[case testMatchWithGuard_python3_10] +def f(): + match 123: + case 123 if True: + print("matched") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L3 :: bool +L1: + if 1 goto L2 else goto L3 :: bool +L2: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L4 +L3: +L4: + r8 = box(None, 1) + return r8 +[case testMatchSingleton_python3_10] +def f(): + match 123: + case True: + print("value is True") + case False: + print("value is False") + case None: + print("value is None") +[out] +def f(): + r0, r1 :: object + r2 :: bit + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10, r11 :: object + r12 :: bit + r13 :: str + r14 :: object + r15 :: str + r16 :: object + r17 :: object[1] + r18 :: object_ptr + r19, r20, r21 :: object + r22 :: bit + r23 :: str + r24 :: object + r25 :: str + r26 :: object + r27 :: object[1] + r28 :: object_ptr + r29, r30 :: object +L0: + r0 = object 123 + r1 = box(bool, 1) + r2 = r0 == r1 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = 'value is True' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L7 +L2: + r10 = object 123 + r11 = box(bool, 0) + r12 = r10 == r11 + if r12 goto L3 else goto L4 :: bool +L3: + r13 = 'value is False' + r14 = builtins :: module + r15 = 'print' + r16 = CPyObject_GetAttr(r14, r15) + r17 = [r13] + r18 = load_address r17 + r19 = _PyObject_Vectorcall(r16, r18, 1, 0) + keep_alive r13 + goto L7 +L4: + r20 = load_address _Py_NoneStruct + r21 = object 123 + r22 = r21 == r20 + if r22 goto L5 else goto L6 :: bool +L5: + r23 = 'value is None' + r24 = builtins :: module + r25 = 'print' + r26 = CPyObject_GetAttr(r24, r25) + r27 = [r23] + r28 = load_address r27 + r29 = _PyObject_Vectorcall(r26, r28, 1, 0) + keep_alive r23 + goto L7 +L6: +L7: + r30 = box(None, 1) + return r30 +[case testMatchRecursiveOrPattern_python3_10] +def f(): + match 1: + case 1 | int(): + print("matched") +[out] +def f(): + r0 :: bit + r1, r2 :: object + r3 :: bool + r4 :: str + r5 :: object + r6 :: str + r7 :: object + r8 :: object[1] + r9 :: object_ptr + r10, r11 :: object +L0: + r0 = 2 == 2 + if r0 goto L3 else goto L1 :: bool +L1: + r1 = load_address PyLong_Type + r2 = object 1 + r3 = CPy_TypeCheck(r2, r1) + if r3 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r4 = 'matched' + r5 = builtins :: module + r6 = 'print' + r7 = CPyObject_GetAttr(r5, r6) + r8 = [r4] + r9 = load_address r8 + r10 = _PyObject_Vectorcall(r7, r9, 1, 0) + keep_alive r4 + goto L5 +L4: +L5: + r11 = box(None, 1) + return r11 +[case testMatchAsPattern_python3_10] +def f(): + match 123: + case 123 as x: + print(x) +[out] +def f(): + r0 :: bit + r1, x, r2 :: object + 
r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + r1 = object 123 + x = r1 + if r0 goto L1 else goto L2 :: bool +L1: + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [x] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive x + goto L3 +L2: +L3: + r8 = box(None, 1) + return r8 +[case testMatchAsPatternOnOrPattern_python3_10] +def f(): + match 1: + case (1 | 2) as x: + print(x) +[out] +def f(): + r0 :: bit + r1, x :: object + r2 :: bit + r3, r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = 2 == 2 + r1 = object 1 + x = r1 + if r0 goto L3 else goto L1 :: bool +L1: + r2 = 2 == 4 + r3 = object 2 + x = r3 + if r2 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [x] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive x + goto L5 +L4: +L5: + r10 = box(None, 1) + return r10 +[case testMatchAsPatternOnClassPattern_python3_10] +def f(): + match 123: + case int() as i: + print(i) +[out] +def f(): + r0, r1 :: object + r2 :: bool + i :: int + r3 :: object + r4 :: str + r5, r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = object 123 + r2 = CPy_TypeCheck(r1, r0) + if r2 goto L1 else goto L3 :: bool +L1: + i = 246 +L2: + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = box(int, i) + r7 = [r6] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r5, r8, 1, 0) + keep_alive r6 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchClassPatternWithPositionalArgs_python3_10] +class Position: + __match_args__ = ("x", "y", "z") + + x: int + y: int + z: int + +def f(x): + match x: + case Position(1, 2, 3): + print("matched") +[out] +def Position.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.Position + r0, r1, r2 :: str + r3 :: tuple[str, str, str] +L0: + r0 = 'x' + r1 = 'y' + r2 = 'z' + r3 = (r0, r1, r2) + __mypyc_self__.__match_args__ = r3 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12, r13, r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: str + r19, r20, r21 :: object + r22 :: int32 + r23 :: bit + r24 :: bool + r25 :: str + r26 :: object + r27 :: str + r28 :: object + r29 :: object[1] + r30 :: object_ptr + r31, r32 :: object +L0: + r0 = __main__.Position :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'x' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L2 else goto L5 :: bool +L2: + r11 = 'y' + r12 = CPyObject_GetAttr(x, r11) + r13 = object 2 + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L3 else goto L5 :: bool +L3: + r18 = 'z' + r19 = CPyObject_GetAttr(x, r18) + r20 = object 3 + r21 = PyObject_RichCompare(r19, r20, 2) + r22 = PyObject_IsTrue(r21) + r23 = r22 >= 0 :: signed + r24 = truncate r22: int32 to builtins.bool + if r24 goto L4 else goto L5 :: bool +L4: + r25 = 'matched' + r26 = builtins :: module + 
r27 = 'print' + r28 = CPyObject_GetAttr(r26, r27) + r29 = [r25] + r30 = load_address r29 + r31 = _PyObject_Vectorcall(r28, r30, 1, 0) + keep_alive r25 + goto L6 +L5: +L6: + r32 = box(None, 1) + return r32 +[case testMatchClassPatternWithKeywordPatterns_python3_10] +class Position: + x: int + y: int + z: int + +def f(x): + match x: + case Position(z=1, y=2, x=3): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12, r13, r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: str + r19, r20, r21 :: object + r22 :: int32 + r23 :: bit + r24 :: bool + r25 :: str + r26 :: object + r27 :: str + r28 :: object + r29 :: object[1] + r30 :: object_ptr + r31, r32 :: object +L0: + r0 = __main__.Position :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'z' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L2 else goto L5 :: bool +L2: + r11 = 'y' + r12 = CPyObject_GetAttr(x, r11) + r13 = object 2 + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L3 else goto L5 :: bool +L3: + r18 = 'x' + r19 = CPyObject_GetAttr(x, r18) + r20 = object 3 + r21 = PyObject_RichCompare(r19, r20, 2) + r22 = PyObject_IsTrue(r21) + r23 = r22 >= 0 :: signed + r24 = truncate r22: int32 to builtins.bool + if r24 goto L4 else goto L5 :: bool +L4: + r25 = 'matched' + r26 = builtins :: module + r27 = 'print' + r28 = CPyObject_GetAttr(r26, r27) + r29 = [r25] + r30 = load_address r29 + r31 = _PyObject_Vectorcall(r28, r30, 1, 0) + keep_alive r25 + goto L6 +L5: +L6: + r32 = box(None, 1) + return r32 +[case testMatchClassPatternWithNestedPattern_python3_10] +class C: + num: int + +def f(x): + match x: + case C(num=1 | 2): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12 :: object + r13 :: int32 + r14 :: bit + r15 :: bool + r16 :: str + r17 :: object + r18 :: str + r19 :: object + r20 :: object[1] + r21 :: object_ptr + r22, r23 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'num' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L4 else goto L2 :: bool +L2: + r11 = object 2 + r12 = PyObject_RichCompare(r5, r11, 2) + r13 = PyObject_IsTrue(r12) + r14 = r13 >= 0 :: signed + r15 = truncate r13: int32 to builtins.bool + if r15 goto L4 else goto L3 :: bool +L3: + goto L5 +L4: + r16 = 'matched' + r17 = builtins :: module + r18 = 'print' + r19 = CPyObject_GetAttr(r17, r18) + r20 = [r16] + r21 = load_address r20 + r22 = _PyObject_Vectorcall(r19, r21, 1, 0) + keep_alive r16 + goto L6 +L5: +L6: + r23 = box(None, 1) + return r23 +[case testAsPatternDoesntBleedIntoSubPatterns_python3_10] +class C: + __match_args__ = ("a", "b") + a: int + b: int + +def f(x): + match x: + case C(1, 2) as y: + print("matched") +[out] +def 
C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C + r0, r1 :: str + r2 :: tuple[str, str] +L0: + r0 = 'a' + r1 = 'b' + r2 = (r0, r1) + __mypyc_self__.__match_args__ = r2 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4, y :: __main__.C + r5 :: str + r6, r7, r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool + r12 :: str + r13, r14, r15 :: object + r16 :: int32 + r17 :: bit + r18 :: bool + r19 :: str + r20 :: object + r21 :: str + r22 :: object + r23 :: object[1] + r24 :: object_ptr + r25, r26 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = cast(__main__.C, x) + y = r4 +L2: + r5 = 'a' + r6 = CPyObject_GetAttr(x, r5) + r7 = object 1 + r8 = PyObject_RichCompare(r6, r7, 2) + r9 = PyObject_IsTrue(r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + if r11 goto L3 else goto L5 :: bool +L3: + r12 = 'b' + r13 = CPyObject_GetAttr(x, r12) + r14 = object 2 + r15 = PyObject_RichCompare(r13, r14, 2) + r16 = PyObject_IsTrue(r15) + r17 = r16 >= 0 :: signed + r18 = truncate r16: int32 to builtins.bool + if r18 goto L4 else goto L5 :: bool +L4: + r19 = 'matched' + r20 = builtins :: module + r21 = 'print' + r22 = CPyObject_GetAttr(r20, r21) + r23 = [r19] + r24 = load_address r23 + r25 = _PyObject_Vectorcall(r22, r24, 1, 0) + keep_alive r19 + goto L6 +L5: +L6: + r26 = box(None, 1) + return r26 +[case testMatchClassPatternPositionalCapture_python3_10] +class C: + __match_args__ = ("x",) + + x: int + +def f(x): + match x: + case C(num): + print("matched") +[out] +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C + r0 :: str + r1 :: tuple[str] +L0: + r0 = 'x' + r1 = (r0) + __mypyc_self__.__match_args__ = r1 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5 :: object + r6, num :: int + r7 :: str + r8 :: object + r9 :: str + r10 :: object + r11 :: object[1] + r12 :: object_ptr + r13, r14 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L3 :: bool +L1: + r4 = 'x' + r5 = CPyObject_GetAttr(x, r4) + r6 = unbox(int, r5) + num = r6 +L2: + r7 = 'matched' + r8 = builtins :: module + r9 = 'print' + r10 = CPyObject_GetAttr(r8, r9) + r11 = [r7] + r12 = load_address r11 + r13 = _PyObject_Vectorcall(r10, r12, 1, 0) + keep_alive r7 + goto L4 +L3: +L4: + r14 = box(None, 1) + return r14 +[case testMatchMappingEmpty_python3_10] +def f(x): + match x: + case {}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: object + r4 :: str + r5 :: object + r6 :: object[1] + r7 :: object_ptr + r8, r9 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = 'matched' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = [r2] + r7 = load_address r6 + r8 = _PyObject_Vectorcall(r5, r7, 1, 0) + keep_alive r2 + goto L3 +L2: +L3: + r9 = box(None, 1) + return r9 +[case testMatchMappingPatternWithKeys_python3_10] +def f(x): + match x: + case {"key": "value"}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: int32 + r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12 :: object + r13 :: str + r14 :: object + r15 :: 
object[1] + r16 :: object_ptr + r17, r18 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = 'key' + r3 = PyMapping_HasKey(x, r2) + r4 = r3 != 0 + if r4 goto L2 else goto L4 :: bool +L2: + r5 = PyObject_GetItem(x, r2) + r6 = 'value' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L4 :: bool +L3: + r11 = 'matched' + r12 = builtins :: module + r13 = 'print' + r14 = CPyObject_GetAttr(r12, r13) + r15 = [r11] + r16 = load_address r15 + r17 = _PyObject_Vectorcall(r14, r16, 1, 0) + keep_alive r11 + goto L5 +L4: +L5: + r18 = box(None, 1) + return r18 +[case testMatchMappingPatternWithRest_python3_10] +def f(x): + match x: + case {**rest}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2, rest :: dict + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L3 :: bool +L1: + r2 = CPyDict_FromAny(x) + rest = r2 +L2: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchMappingPatternWithRestPopKeys_python3_10] +def f(x): + match x: + case {"key": "value", **rest}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: int32 + r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, rest :: dict + r12 :: int32 + r13 :: bit + r14 :: str + r15 :: object + r16 :: str + r17 :: object + r18 :: object[1] + r19 :: object_ptr + r20, r21 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = 'key' + r3 = PyMapping_HasKey(x, r2) + r4 = r3 != 0 + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PyObject_GetItem(x, r2) + r6 = 'value' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = CPyDict_FromAny(x) + rest = r11 + r12 = PyDict_DelItem(r11, r2) + r13 = r12 >= 0 :: signed +L4: + r14 = 'matched' + r15 = builtins :: module + r16 = 'print' + r17 = CPyObject_GetAttr(r15, r16) + r18 = [r14] + r19 = load_address r18 + r20 = _PyObject_Vectorcall(r17, r19, 1, 0) + keep_alive r14 + goto L6 +L5: +L6: + r21 = box(None, 1) + return r21 +[case testMatchEmptySequencePattern_python3_10] +def f(x): + match x: + case []: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: str + r6 :: object + r7 :: str + r8 :: object + r9 :: object[1] + r10 :: object_ptr + r11, r12 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L3 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 == 0 + if r4 goto L2 else goto L3 :: bool +L2: + r5 = 'matched' + r6 = builtins :: module + r7 = 'print' + r8 = CPyObject_GetAttr(r6, r7) + r9 = [r5] + r10 = load_address r9 + r11 = _PyObject_Vectorcall(r8, r10, 1, 0) + keep_alive r5 + goto L4 +L3: +L4: + r12 = box(None, 1) + return r12 +[case testMatchFixedLengthSequencePattern_python3_10] +def f(x): + match x: + case [1, 2]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: 
native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: str + r18 :: object + r19 :: str + r20 :: object + r21 :: object[1] + r22 :: object_ptr + r23, r24 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 == 2 + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L5 :: bool +L4: + r17 = 'matched' + r18 = builtins :: module + r19 = 'print' + r20 = CPyObject_GetAttr(r18, r19) + r21 = [r17] + r22 = load_address r21 + r23 = _PyObject_Vectorcall(r20, r22, 1, 0) + keep_alive r17 + goto L6 +L5: +L6: + r24 = box(None, 1) + return r24 +[case testMatchSequencePatternWithTrailingUnboundStar_python3_10] +def f(x): + match x: + case [1, 2, *_]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: str + r18 :: object + r19 :: str + r20 :: object + r21 :: object[1] + r22 :: object_ptr + r23, r24 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L5 :: bool +L4: + r17 = 'matched' + r18 = builtins :: module + r19 = 'print' + r20 = CPyObject_GetAttr(r18, r19) + r21 = [r17] + r22 = load_address r21 + r23 = _PyObject_Vectorcall(r20, r22, 1, 0) + keep_alive r17 + goto L6 +L5: +L6: + r24 = box(None, 1) + return r24 +[case testMatchSequencePatternWithTrailingBoundStar_python3_10] +def f(x): + match x: + case [1, 2, *rest]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: native_int + r18, rest :: object + r19 :: str + r20 :: object + r21 :: str + r22 :: object + r23 :: object[1] + r24 :: object_ptr + r25, r26 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L6 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + 
r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L6 :: bool +L4: + r17 = r2 - 0 + r18 = PySequence_GetSlice(x, 2, r17) + rest = r18 +L5: + r19 = 'matched' + r20 = builtins :: module + r21 = 'print' + r22 = CPyObject_GetAttr(r20, r21) + r23 = [r19] + r24 = load_address r23 + r25 = _PyObject_Vectorcall(r22, r24, 1, 0) + keep_alive r19 + goto L7 +L6: +L7: + r26 = box(None, 1) + return r26 +[case testMatchSequenceWithStarPatternInTheMiddle_python3_10] +def f(x): + match x: + case ["start", *rest, "end"]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: native_int + r12 :: object + r13 :: str + r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: native_int + r19, rest :: object + r20 :: str + r21 :: object + r22 :: str + r23 :: object + r24 :: object[1] + r25 :: object_ptr + r26, r27 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = 'start' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L6 :: bool +L3: + r11 = r2 - 1 + r12 = PySequence_GetItem(x, r11) + r13 = 'end' + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L4 else goto L6 :: bool +L4: + r18 = r2 - 1 + r19 = PySequence_GetSlice(x, 1, r18) + rest = r19 +L5: + r20 = 'matched' + r21 = builtins :: module + r22 = 'print' + r23 = CPyObject_GetAttr(r21, r22) + r24 = [r20] + r25 = load_address r24 + r26 = _PyObject_Vectorcall(r23, r25, 1, 0) + keep_alive r20 + goto L7 +L6: +L7: + r27 = box(None, 1) + return r27 +[case testMatchSequenceWithStarPatternAtTheStart_python3_10] +def f(x): + match x: + case [*rest, 1, 2]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: native_int + r6, r7, r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool + r12 :: native_int + r13, r14, r15 :: object + r16 :: int32 + r17 :: bit + r18 :: bool + r19 :: native_int + r20, rest :: object + r21 :: str + r22 :: object + r23 :: str + r24 :: object + r25 :: object[1] + r26 :: object_ptr + r27, r28 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = r2 - 2 + r6 = PySequence_GetItem(x, r5) + r7 = object 1 + r8 = PyObject_RichCompare(r6, r7, 2) + r9 = PyObject_IsTrue(r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + if r11 goto L3 else goto L6 :: bool +L3: + r12 = r2 - 1 + r13 = PySequence_GetItem(x, r12) + r14 = object 2 + r15 = PyObject_RichCompare(r13, r14, 2) + r16 = PyObject_IsTrue(r15) + r17 = r16 >= 0 :: signed + r18 = truncate r16: int32 to builtins.bool + if r18 goto L4 else goto L6 :: bool +L4: + r19 = r2 - 2 + r20 = PySequence_GetSlice(x, 0, r19) + rest = r20 +L5: + r21 = 'matched' + r22 = builtins :: module + r23 = 'print' + r24 = CPyObject_GetAttr(r22, r23) + r25 = [r21] + r26 = 
load_address r25 + r27 = _PyObject_Vectorcall(r24, r26, 1, 0) + keep_alive r21 + goto L7 +L6: +L7: + r28 = box(None, 1) + return r28 +[case testMatchBuiltinClassPattern_python3_10] +def f(x): + match x: + case int(y): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: bool + r2, y :: int + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = CPy_TypeCheck(x, r0) + if r1 goto L1 else goto L3 :: bool +L1: + r2 = unbox(int, x) + y = r2 +L2: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchSequenceCaptureAll_python3_10] +def f(x): + match x: + case [*rest]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: native_int + r6, rest :: object + r7 :: str + r8 :: object + r9 :: str + r10 :: object + r11 :: object[1] + r12 :: object_ptr + r13, r14 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 0 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = r2 - 0 + r6 = PySequence_GetSlice(x, 0, r5) + rest = r6 +L3: + r7 = 'matched' + r8 = builtins :: module + r9 = 'print' + r10 = CPyObject_GetAttr(r8, r9) + r11 = [r7] + r12 = load_address r11 + r13 = _PyObject_Vectorcall(r10, r12, 1, 0) + keep_alive r7 + goto L5 +L4: +L5: + r14 = box(None, 1) + return r14 +[case testMatchTypeAnnotatedNativeClass_python3_10] +class A: + a: int + +def f(x: A | int) -> int: + match x: + case A(a=a): + return a + case int(): + return x +[out] +def f(x): + x :: union[__main__.A, int] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5 :: object + r6, a :: int + r7 :: object + r8 :: bool + r9 :: int +L0: + r0 = __main__.A :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L3 :: bool +L1: + r4 = 'a' + r5 = CPyObject_GetAttr(x, r4) + r6 = unbox(int, r5) + a = r6 +L2: + return a +L3: + r7 = load_address PyLong_Type + r8 = CPy_TypeCheck(x, r7) + if r8 goto L4 else goto L5 :: bool +L4: + r9 = unbox(int, x) + return r9 +L5: +L6: + unreachable diff --git a/mypyc/test-data/run-match.test b/mypyc/test-data/run-match.test new file mode 100644 index 000000000000..7b7ad9a4342c --- /dev/null +++ b/mypyc/test-data/run-match.test @@ -0,0 +1,283 @@ +[case testTheBigMatch_python3_10] +class Person: + __match_args__ = ("name", "age") + + name: str + age: int + + def __init__(self, name: str, age: int) -> None: + self.name = name + self.age = age + + def __str__(self) -> str: + return f"Person(name={self.name!r}, age={self.age})" + + +def f(x: object) -> None: + match x: + case 123: + print("test 1") + + case 456 | 789: + print("test 2") + + case True | False | None: + print("test 3") + + case Person("bob" as name, age): + print(f"test 4 ({name=}, {age=})") + + case num if num == 5: + print("test 5") + + case 6 as num: + print(f"test 6 ({num=})") + + case (7 | "7") as value: + print(f"test 7 ({value=})") + + case Person("alice", age=123): + print("test 8") + + case Person("charlie", age=123 | 456): + print("test 9") + + case Person("dave", 123) as dave: + print(f"test 10 {dave}") + + case {"test": 11}: + print("test 11") + + case {"test": 12, 
**rest}: + print(f"test 12 (rest={rest})") + + case {}: + print("test map final") + + case ["test", 13]: + print("test 13") + + case ["test", 13, _]: + print("test 13b") + + case ["test", 14, *_]: + print("test 14") + + # TODO: Fix "rest" being used here coliding with above "rest" + case ["test", 15, *rest2]: + print(f"test 15 ({rest2})") + + case ["test", *rest3, 16]: + print(f"test 16 ({rest3})") + + case [*rest4, "test", 17]: + print(f"test 17 ({rest4})") + + case [*rest4, "test", 18, "some", "fluff"]: + print(f"test 18 ({rest4})") + + case str("test 19"): + print("test 19") + + case str(test_20) if test_20.startswith("test 20"): + print(f"test 20 ({test_20[7:]!r})") + + case ("test 21" as value) | ("test 21 as well" as value): + print(f"test 21 ({value[7:]!r})") + + case []: + print("test sequence final") + + case _: + print("test final") +[file driver.py] +from native import f, Person + +# test 1 +f(123) + +# test 2 +f(456) +f(789) + +# test 3 +f(True) +f(False) +f(None) + +# test 4 +f(Person("bob", 123)) + +# test 5 +f(5) + +# test 6 +f(6) + +# test 7 +f(7) +f("7") + +# test 8 +f(Person("alice", 123)) + +# test 9 +f(Person("charlie", 123)) +f(Person("charlie", 456)) + +# test 10 +f(Person("dave", 123)) + +# test 11 +f({"test": 11}) +f({"test": 11, "some": "key"}) + +# test 12 +f({"test": 12}) +f({"test": 12, "key": "value"}) +f({"test": 12, "key": "value", "abc": "123"}) + +# test map final +f({}) + +# test 13 +f(["test", 13]) + +# test 13b +f(["test", 13, "fail"]) + +# test 14 +f(["test", 14]) +f(["test", 14, "something"]) + +# test 15 +f(["test", 15]) +f(["test", 15, "something"]) + +# test 16 +f(["test", 16]) +f(["test", "filler", 16]) +f(["test", "more", "filler", 16]) + +# test 17 +f(["test", 17]) +f(["stuff", "test", 17]) +f(["more", "stuff", "test", 17]) + +# test 18 +f(["test", 18, "some", "fluff"]) +f(["stuff", "test", 18, "some", "fluff"]) +f(["more", "stuff", "test", 18, "some", "fluff"]) + +# test 19 +f("test 19") + +# test 20 +f("test 20") +f("test 20 something else") + +# test 21 +f("test 21") +f("test 21 as well") + +# test sequence final +f([]) + +# test final +f("") + +[out] +test 1 +test 2 +test 2 +test 3 +test 3 +test 3 +test 4 (name='bob', age=123) +test 5 +test 6 (num=6) +test 7 (value=7) +test 7 (value='7') +test 8 +test 9 +test 9 +test 10 Person(name='dave', age=123) +test 11 +test 11 +test 12 (rest={}) +test 12 (rest={'key': 'value'}) +test 12 (rest={'key': 'value', 'abc': '123'}) +test map final +test 13 +test 13b +test 14 +test 14 +test 15 ([]) +test 15 (['something']) +test 16 ([]) +test 16 (['filler']) +test 16 (['more', 'filler']) +test 17 ([]) +test 17 (['stuff']) +test 17 (['more', 'stuff']) +test 18 ([]) +test 18 (['stuff']) +test 18 (['more', 'stuff']) +test 19 +test 20 ('') +test 20 (' something else') +test 21 ('') +test 21 (' as well') +test sequence final +test final +[case testCustomMappingAndSequenceObjects_python3_10] +def f(x: object) -> None: + match x: + case {"key": "value", **rest}: + print(rest, type(rest)) + + case [1, 2, *rest2]: + print(rest2, type(rest2)) + +[file driver.py] +from collections.abc import Mapping, Sequence + +from native import f + +class CustomMapping(Mapping): + inner: dict + + def __init__(self, inner: dict) -> None: + self.inner = inner + + def __getitem__(self, key): + return self.inner[key] + + def __iter__(self): + return iter(self.inner) + + def __len__(self) -> int: + return len(self.inner) + + +class CustomSequence(Sequence): + inner: list + + def __init__(self, inner: list) -> None: + self.inner = inner + + 
def __getitem__(self, index: int) -> None: + return self.inner[index] + + def __len__(self) -> int: + return len(self.inner) + +mapping = CustomMapping({"key": "value", "some": "data"}) +sequence = CustomSequence([1, 2, 3]) + +f(mapping) +f(sequence) + +[out] +{'some': 'data'} +[3] diff --git a/mypyc/test/test_irbuild.py b/mypyc/test/test_irbuild.py index 00a8c074da87..8928f94d6211 100644 --- a/mypyc/test/test_irbuild.py +++ b/mypyc/test/test_irbuild.py @@ -3,6 +3,7 @@ from __future__ import annotations import os.path +import sys from mypy.errors import CompileError from mypy.test.config import test_temp_dir @@ -48,6 +49,9 @@ "irbuild-glue-methods.test", ] +if sys.version_info >= (3, 10): + files.append("irbuild-match.test") + class TestGenOps(MypycDataSuite): files = files diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index 351caf7c93ed..fff775ebfab5 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -68,6 +68,9 @@ if sys.version_info >= (3, 8): files.append("run-python38.test") +if sys.version_info >= (3, 10): + files.append("run-match.test") + setup_format = """\ from setuptools import setup from mypyc.build import mypycify diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py index 8339889fa9f5..609ffc27385e 100644 --- a/mypyc/test/testutil.py +++ b/mypyc/test/testutil.py @@ -108,7 +108,7 @@ def build_ir_for_single_file2( options.hide_error_codes = True options.use_builtins_fixtures = True options.strict_optional = True - options.python_version = (3, 6) + options.python_version = compiler_options.python_version or (3, 6) options.export_types = True options.preserve_asts = True options.allow_empty_bodies = True @@ -277,6 +277,7 @@ def infer_ir_build_options_from_test_name(name: str) -> CompilerOptions | None: m = re.search(r"_python([3-9]+)_([0-9]+)(_|\b)", name) if m: options.capi_version = (int(m.group(1)), int(m.group(2))) + options.python_version = options.capi_version elif "_py" in name or "_Python" in name: assert False, f"Invalid _py* suffix (should be _pythonX_Y): {name}" return options From b8c03ab6809aab56928f3cd865edb44944a600a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A9ry=20Ogam?= Date: Sat, 3 Dec 2022 00:37:52 +0100 Subject: [PATCH 101/292] Fix incorrect class names in literal_types.rst (#14237) --- docs/source/literal_types.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/literal_types.rst b/docs/source/literal_types.rst index 7195ccc2b69b..a66d300bd0fd 100644 --- a/docs/source/literal_types.rst +++ b/docs/source/literal_types.rst @@ -495,13 +495,13 @@ the same way Python's runtime does: ... right = 'right' Traceback (most recent call last): ... - TypeError: Other: cannot extend enumeration 'Some' + TypeError: AllDirection: cannot extend enumeration 'Direction' Mypy also catches this error: .. code-block:: python - class AllDirection(Direction): # E: Cannot inherit from final class "Some" + class AllDirection(Direction): # E: Cannot inherit from final class "Direction" left = 'left' right = 'right' From 734a0b96b79f20b3290ca5fee97c749ccb2e308f Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 4 Dec 2022 14:50:48 +0000 Subject: [PATCH 102/292] Flycheck-mypy is deprecated. 
Its functionality was merged in Flycheck (#14247) See https://github.com/lbolla/emacs-flycheck-mypy#readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 95cacb05d682..dd09d15ff152 100644 --- a/README.md +++ b/README.md @@ -124,7 +124,7 @@ Mypy can be integrated into popular IDEs: `let g:syntastic_python_checkers=['mypy']` * Using [ALE](https://github.com/dense-analysis/ale): should be enabled by default when `mypy` is installed, or can be explicitly enabled by adding `let b:ale_linters = ['mypy']` in `~/vim/ftplugin/python.vim` -* Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy) +* Emacs: using [Flycheck](https://github.com/flycheck/) * Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) * Atom: [linter-mypy](https://atom.io/packages/linter-mypy) * PyCharm: [mypy plugin](https://github.com/dropbox/mypy-PyCharm-plugin) (PyCharm integrates From 7785b6035e919180593f027305171e4a844a947b Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 4 Dec 2022 18:47:57 +0000 Subject: [PATCH 103/292] Stop saying mypy is beta software (#14251) Ref #13685 cc @Michael0x2a @JukkaL @hauntsaninja Co-authored-by: Alex Waygood --- README.md | 9 +-------- docs/source/index.rst | 6 +++--- setup.py | 2 +- 3 files changed, 5 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index dd09d15ff152..a8b26f7abd62 100644 --- a/README.md +++ b/README.md @@ -160,14 +160,7 @@ To get started with developing mypy, see [CONTRIBUTING.md](CONTRIBUTING.md). If you need help getting started, don't hesitate to ask on [gitter](https://gitter.im/python/typing). -Development status ------------------- - -Mypy is beta software, but it has already been used in production -for several years at Dropbox and in many other organizations, and -it has an extensive test suite. - -mypyc and compiled version of mypy +Mypyc and compiled version of mypy ---------------------------------- [Mypyc](https://github.com/mypyc/mypyc) uses Python type hints to compile Python diff --git a/docs/source/index.rst b/docs/source/index.rst index 1f77e951843d..27b3a078af6c 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -39,10 +39,10 @@ understand, debug, and maintain. .. note:: - Mypy is used in production by many companies and projects, but mypy is - officially beta software. There will be occasional changes + Although mypy is production ready, there will be occasional changes that break backward compatibility. The mypy development team tries to - minimize the impact of changes to user code. + minimize the impact of changes to user code. In case of a major breaking + change, mypy's major version will be bumped. Contents -------- diff --git a/setup.py b/setup.py index 669e0cc4b615..a148237f0b95 100644 --- a/setup.py +++ b/setup.py @@ -171,7 +171,7 @@ def run(self): classifiers = [ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", From 0885d85a7b224cefb56fe63a75bb29ab73a23183 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 5 Dec 2022 03:57:07 +0000 Subject: [PATCH 104/292] Add link to error codes in README (#14249) Several issues that I have seen recently could be fixed by enabling/disabling an error code. I think we should attract more attention to error code customization (especially for new users). 
--- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index a8b26f7abd62..2ec4be4f0dee 100644 --- a/README.md +++ b/README.md @@ -84,6 +84,7 @@ more examples and information. In particular, see: - [type hints cheat sheet](https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html) - [getting started](https://mypy.readthedocs.io/en/stable/getting_started.html) +- [list of error codes](https://mypy.readthedocs.io/en/stable/error_code_list.html) Quick start ----------- From 924bc68b07bec2b72ce055f6cd95f6aea11fda7e Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 5 Dec 2022 03:57:23 +0000 Subject: [PATCH 105/292] Advertise mypy daemon in README (#14248) Now that daemon is more mature and stable I think we can attract some more attention to it. --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 2ec4be4f0dee..01d876ead3e4 100644 --- a/README.md +++ b/README.md @@ -110,9 +110,13 @@ programs, even if mypy reports type errors: python3 PROGRAM You can also try mypy in an [online playground](https://mypy-play.net/) (developed by -Yusuke Miyazaki). +Yusuke Miyazaki). If you are working with large code bases, you can run mypy in +[daemon mode], that will give much faster (often sub-second) incremental updates: + + dmypy run -- PROGRAM [statically typed parts]: https://mypy.readthedocs.io/en/latest/getting_started.html#function-signatures-and-dynamic-vs-static-typing +[daemon-mode]: https://mypy.readthedocs.io/en/stable/mypy_daemon.html Integrations From d2ab2e7dd1a4a43b0cb6626a63d722387f53b7bc Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Tue, 6 Dec 2022 10:52:07 +0300 Subject: [PATCH 106/292] Allow trailing commas in `ini` configuration of multiline values (#14240) Now these two samples are identical: ```ini [mypy] enable_error_code = truthy-bool, redundant-expr, unused-awaitable, ignore-without-code ``` and ```ini [mypy] enable_error_code = truthy-bool, redundant-expr, unused-awaitable, ignore-without-code, ``` I've covered some of the changed values, but not all (they are identical - no need to create so many slow tests). I've also checked `pyproject.toml`. It does not have this problem: it uses `[]` to create arrays, so no trailing commas are used there. --- mypy/config_parser.py | 23 +++++++++++++++------- test-data/unit/check-custom-plugin.test | 9 +++++++++ test-data/unit/cmdline.test | 26 +++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 7 deletions(-) diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 485d2f67f5de..190782a3bded 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -137,6 +137,15 @@ def check_follow_imports(choice: str) -> str: return choice +def split_commas(value: str) -> list[str]: + # Uses a bit smarter technique to allow last trailing comma + # and to remove last `""` item from the split. + items = value.split(",") + if items and items[-1] == "": + items.pop(-1) + return items + + # For most options, the type of the default value set in options.py is # sufficient, and we don't have to do anything here. 
This table # exists to specify types for values initialized to None or container @@ -151,13 +160,13 @@ def check_follow_imports(choice: str) -> str: "junit_xml": expand_path, "follow_imports": check_follow_imports, "no_site_packages": bool, - "plugins": lambda s: [p.strip() for p in s.split(",")], - "always_true": lambda s: [p.strip() for p in s.split(",")], - "always_false": lambda s: [p.strip() for p in s.split(",")], - "enable_incomplete_feature": lambda s: [p.strip() for p in s.split(",")], - "disable_error_code": lambda s: validate_codes([p.strip() for p in s.split(",")]), - "enable_error_code": lambda s: validate_codes([p.strip() for p in s.split(",")]), - "package_root": lambda s: [p.strip() for p in s.split(",")], + "plugins": lambda s: [p.strip() for p in split_commas(s)], + "always_true": lambda s: [p.strip() for p in split_commas(s)], + "always_false": lambda s: [p.strip() for p in split_commas(s)], + "enable_incomplete_feature": lambda s: [p.strip() for p in split_commas(s)], + "disable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), + "enable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), + "package_root": lambda s: [p.strip() for p in split_commas(s)], "cache_dir": expand_path, "python_executable": expand_path, "strict": bool, diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index a716109d345e..d7beea0390e7 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -163,6 +163,15 @@ reveal_type(f()) # N: Revealed type is "builtins.int" \[mypy] plugins=/test-data/unit/plugins/customentry.py:register +[case testCustomPluginEntryPointFileTrailingComma] +# flags: --config-file tmp/mypy.ini +def f() -> str: ... +reveal_type(f()) # N: Revealed type is "builtins.int" +[file mypy.ini] +\[mypy] +plugins = + /test-data/unit/plugins/customentry.py:register, + [case testCustomPluginEntryPoint] # flags: --config-file tmp/mypy.ini def f() -> str: ... diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 92b0af6942bc..9eba9ea1e906 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1570,3 +1570,29 @@ foo.py:1: error: "int" not callable 1() [out] foo/m.py:1: error: "int" not callable + +[case testCmdlineCfgEnableErrorCodeTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +enable_error_code = + truthy-bool, + redundant-expr, +[out] + +[case testCmdlineCfgDisableErrorCodeTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +disable_error_code = + misc, + override, +[out] + +[case testCmdlineCfgAlwaysTrueTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +always_true = + MY_VAR, +[out] From 7e9aa7433c72e0950229a34e0c086d7829208f06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tin=20Tvrtkovi=C4=87?= Date: Tue, 6 Dec 2022 15:42:28 +0100 Subject: [PATCH 107/292] Enable Final instance attributes for attrs (#14232) A quick patch to enable the following scenario: ```python @define class C: a: Final[int] # `a` is a final instance attribute ``` There are some edge cases I haven't covered here that would be complex to handle and not add much value IMO, so I think this will be useful like this. 
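For illustration (this mirrors the new test cases rather than adding anything beyond them), the intended behaviour is roughly:

```python
from typing import Final
from attrs import define

@define
class C:
    a: Final[int]  # no default, so it stays a required __init__ parameter

c = C(1)   # ok: the generated __init__ still accepts and sets `a`
c.a = 2    # error: Cannot assign to final attribute "a"
```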
--- mypy/plugins/attrs.py | 14 +++++++++++--- test-data/unit/check-attr.test | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index ce0f45967152..16e8891e5f57 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -3,7 +3,7 @@ from __future__ import annotations from typing import Iterable, List, cast -from typing_extensions import Final +from typing_extensions import Final, Literal import mypy.plugin # To avoid circular imports. from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type @@ -756,13 +756,14 @@ def _add_init( ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute], adder: MethodAdder, - method_name: str, + method_name: Literal["__init__", "__attrs_init__"], ) -> None: """Generate an __init__ method for the attributes and add it to the class.""" - # Convert attributes to arguments with kw_only arguments at the end of + # Convert attributes to arguments with kw_only arguments at the end of # the argument list pos_args = [] kw_only_args = [] + sym_table = ctx.cls.info.names for attribute in attributes: if not attribute.init: continue @@ -770,6 +771,13 @@ def _add_init( kw_only_args.append(attribute.argument(ctx)) else: pos_args.append(attribute.argument(ctx)) + + # If the attribute is Final, present in `__init__` and has + # no default, make sure it doesn't error later. + if not attribute.has_default and attribute.name in sym_table: + sym_node = sym_table[attribute.name].node + if isinstance(sym_node, Var) and sym_node.is_final: + sym_node.final_set_in_init = True args = pos_args + kw_only_args if all( # We use getattr rather than instance checks because the variable.type diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index 4d27d5f39d1e..f555f2ea7011 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -1834,3 +1834,36 @@ class Sub(Base): # This matches runtime semantics reveal_type(Sub) # N: Revealed type is "def (*, name: builtins.str, first_name: builtins.str, last_name: builtins.str) -> __main__.Sub" [builtins fixtures/property.pyi] + +[case testFinalInstanceAttribute] +from attrs import define +from typing import Final + +@define +class C: + a: Final[int] + +reveal_type(C) # N: Revealed type is "def (a: builtins.int) -> __main__.C" + +C(1).a = 2 # E: Cannot assign to final attribute "a" + +[builtins fixtures/property.pyi] + +[case testFinalInstanceAttributeInheritance] +from attrs import define +from typing import Final + +@define +class C: + a: Final[int] + +@define +class D(C): + b: Final[str] + +reveal_type(D) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> __main__.D" + +D(1, "").a = 2 # E: Cannot assign to final attribute "a" +D(1, "").b = "2" # E: Cannot assign to final attribute "b" + +[builtins fixtures/property.pyi] \ No newline at end of file From dde01d6bfbed3bd0a477d54b15f49e5187b38038 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 6 Dec 2022 15:27:26 +0000 Subject: [PATCH 108/292] [mypyc] Support tuples of native ints (#14252) A tuple such as `tuple[i64, i64]` can't have a dedicated error value, so use overlapping error values, similarly to how we support plain native integers such as `i64`. For a heterogeneous tuple such as as `tuple[i64, str]` we attempt to store the error value in the smallest item index where the value type supports error values (1 in this example). 
This affects error returns, undefined attributes, default arguments and undefined locals. --- mypyc/analysis/attrdefined.py | 4 +- mypyc/codegen/emit.py | 55 ++++++++++++------- mypyc/codegen/emitclass.py | 6 +- mypyc/codegen/emitfunc.py | 3 +- mypyc/codegen/emitwrapper.py | 8 +-- mypyc/ir/func_ir.py | 4 +- mypyc/ir/ops.py | 5 +- mypyc/ir/rtypes.py | 1 + mypyc/irbuild/builder.py | 3 +- mypyc/irbuild/env_class.py | 4 +- mypyc/irbuild/ll_builder.py | 2 +- mypyc/test-data/run-i64.test | 100 +++++++++++++++++++++++++++++++++- mypyc/transform/exceptions.py | 22 ++++++-- mypyc/transform/uninit.py | 4 +- 14 files changed, 173 insertions(+), 48 deletions(-) diff --git a/mypyc/analysis/attrdefined.py b/mypyc/analysis/attrdefined.py index dc871a93eba1..1368b7f5315f 100644 --- a/mypyc/analysis/attrdefined.py +++ b/mypyc/analysis/attrdefined.py @@ -91,7 +91,7 @@ def foo(self) -> int: SetMem, Unreachable, ) -from mypyc.ir.rtypes import RInstance, is_fixed_width_rtype +from mypyc.ir.rtypes import RInstance # If True, print out all always-defined attributes of native classes (to aid # debugging and testing) @@ -424,5 +424,5 @@ def detect_undefined_bitmap(cl: ClassIR, seen: Set[ClassIR]) -> None: if len(cl.base_mro) > 1: cl.bitmap_attrs.extend(cl.base_mro[1].bitmap_attrs) for n, t in cl.attributes.items(): - if is_fixed_width_rtype(t) and not cl.is_always_defined(n): + if t.error_overlap and not cl.is_always_defined(n): cl.bitmap_attrs.append(n) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 15dece700a1e..368c5dd366ea 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -364,7 +364,8 @@ def _emit_attr_bitmap_update( self, value: str, obj: str, rtype: RType, cl: ClassIR, attr: str, clear: bool ) -> None: if value: - self.emit_line(f"if (unlikely({value} == {self.c_undefined_value(rtype)})) {{") + check = self.error_value_check(rtype, value, "==") + self.emit_line(f"if (unlikely({check})) {{") index = cl.bitmap_attrs.index(attr) mask = 1 << (index & (BITMAP_BITS - 1)) bitmap = self.attr_bitmap_expr(obj, cl, index) @@ -389,16 +390,10 @@ def emit_undefined_attr_check( *, unlikely: bool = False, ) -> None: - if isinstance(rtype, RTuple): - check = "{}".format( - self.tuple_undefined_check_cond(rtype, attr_expr, self.c_undefined_value, compare) - ) - else: - undefined = self.c_undefined_value(rtype) - check = f"{attr_expr} {compare} {undefined}" + check = self.error_value_check(rtype, attr_expr, compare) if unlikely: check = f"unlikely({check})" - if is_fixed_width_rtype(rtype): + if rtype.error_overlap: index = cl.bitmap_attrs.index(attr) bit = 1 << (index & (BITMAP_BITS - 1)) attr = self.bitmap_field(index) @@ -406,25 +401,47 @@ def emit_undefined_attr_check( check = f"{check} && !(({obj_expr})->{attr} & {bit})" self.emit_line(f"if ({check}) {{") + def error_value_check(self, rtype: RType, value: str, compare: str) -> str: + if isinstance(rtype, RTuple): + return self.tuple_undefined_check_cond( + rtype, value, self.c_error_value, compare, check_exception=False + ) + else: + return f"{value} {compare} {self.c_error_value(rtype)}" + def tuple_undefined_check_cond( self, rtuple: RTuple, tuple_expr_in_c: str, c_type_compare_val: Callable[[RType], str], compare: str, + *, + check_exception: bool = True, ) -> str: if len(rtuple.types) == 0: # empty tuple return "{}.empty_struct_error_flag {} {}".format( tuple_expr_in_c, compare, c_type_compare_val(int_rprimitive) ) - item_type = rtuple.types[0] + if rtuple.error_overlap: + i = 0 + item_type = rtuple.types[0] + else: + for i, typ in 
enumerate(rtuple.types): + if not typ.error_overlap: + item_type = rtuple.types[i] + break + else: + assert False, "not expecting tuple with error overlap" if isinstance(item_type, RTuple): return self.tuple_undefined_check_cond( - item_type, tuple_expr_in_c + ".f0", c_type_compare_val, compare + item_type, tuple_expr_in_c + f".f{i}", c_type_compare_val, compare ) else: - return f"{tuple_expr_in_c}.f0 {compare} {c_type_compare_val(item_type)}" + check = f"{tuple_expr_in_c}.f{i} {compare} {c_type_compare_val(item_type)}" + if rtuple.error_overlap and check_exception: + check += " && PyErr_Occurred()" + return check def tuple_undefined_value(self, rtuple: RTuple) -> str: return "tuple_undefined_" + rtuple.unique_id @@ -986,18 +1003,18 @@ def emit_box( def emit_error_check(self, value: str, rtype: RType, failure: str) -> None: """Emit code for checking a native function return value for uncaught exception.""" - if is_fixed_width_rtype(rtype): - # The error value is also valid as a normal value, so we need to also check - # for a raised exception. - self.emit_line(f"if ({value} == {self.c_error_value(rtype)} && PyErr_Occurred()) {{") - elif not isinstance(rtype, RTuple): - self.emit_line(f"if ({value} == {self.c_error_value(rtype)}) {{") - else: + if isinstance(rtype, RTuple): if len(rtype.types) == 0: return # empty tuples can't fail. else: cond = self.tuple_undefined_check_cond(rtype, value, self.c_error_value, "==") self.emit_line(f"if ({cond}) {{") + elif rtype.error_overlap: + # The error value is also valid as a normal value, so we need to also check + # for a raised exception. + self.emit_line(f"if ({value} == {self.c_error_value(rtype)} && PyErr_Occurred()) {{") + else: + self.emit_line(f"if ({value} == {self.c_error_value(rtype)}) {{") self.emit_lines(failure, "}") def emit_gc_visit(self, target: str, rtype: RType) -> None: diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 0fdb6e8a98c3..1e774bbd0185 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -20,7 +20,7 @@ from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX, use_fastcall from mypyc.ir.class_ir import ClassIR, VTableEntries from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FuncDecl, FuncIR -from mypyc.ir.rtypes import RTuple, RType, is_fixed_width_rtype, object_rprimitive +from mypyc.ir.rtypes import RTuple, RType, object_rprimitive from mypyc.namegen import NameGenerator from mypyc.sametype import is_same_type @@ -960,13 +960,13 @@ def generate_setter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> N emitter.emit_lines("if (!tmp)", " return -1;") emitter.emit_inc_ref("tmp", rtype) emitter.emit_line(f"self->{attr_field} = tmp;") - if is_fixed_width_rtype(rtype) and not always_defined: + if rtype.error_overlap and not always_defined: emitter.emit_attr_bitmap_set("tmp", "self", rtype, cl, attr) if deletable: emitter.emit_line("} else") emitter.emit_line(f" self->{attr_field} = {emitter.c_undefined_value(rtype)};") - if is_fixed_width_rtype(rtype): + if rtype.error_overlap: emitter.emit_attr_bitmap_clear("self", rtype, cl, attr) emitter.emit_line("return 0;") emitter.emit_line("}") diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 2c096655f41e..534c4d1f20ea 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -60,7 +60,6 @@ RStruct, RTuple, RType, - is_fixed_width_rtype, is_int32_rprimitive, is_int64_rprimitive, is_int_rprimitive, @@ -442,7 +441,7 @@ def visit_set_attr(self, op: 
SetAttr) -> None: self.emitter.emit_dec_ref(attr_expr, attr_rtype) if not always_defined: self.emitter.emit_line("}") - elif is_fixed_width_rtype(attr_rtype) and not cl.is_always_defined(op.attr): + elif attr_rtype.error_overlap and not cl.is_always_defined(op.attr): # If there is overlap with the error value, update bitmap to mark # attribute as defined. self.emitter.emit_attr_bitmap_set(src, obj, attr_rtype, cl, op.attr) diff --git a/mypyc/codegen/emitwrapper.py b/mypyc/codegen/emitwrapper.py index 1abab53bc39d..1fa1e8548e07 100644 --- a/mypyc/codegen/emitwrapper.py +++ b/mypyc/codegen/emitwrapper.py @@ -32,7 +32,6 @@ RInstance, RType, is_bool_rprimitive, - is_fixed_width_rtype, is_int_rprimitive, is_object_rprimitive, object_rprimitive, @@ -718,9 +717,10 @@ def generate_arg_check( """ error = error or AssignHandler() if typ.is_unboxed: - if is_fixed_width_rtype(typ) and optional: + if typ.error_overlap and optional: # Update bitmap is value is provided. - emitter.emit_line(f"{emitter.ctype(typ)} arg_{name} = 0;") + init = emitter.c_undefined_value(typ) + emitter.emit_line(f"{emitter.ctype(typ)} arg_{name} = {init};") emitter.emit_line(f"if (obj_{name} != NULL) {{") bitmap = bitmap_name(bitmap_arg_index // BITMAP_BITS) emitter.emit_line(f"{bitmap} |= 1 << {bitmap_arg_index & (BITMAP_BITS - 1)};") @@ -835,7 +835,7 @@ def emit_arg_processing( optional=optional, bitmap_arg_index=bitmap_arg_index, ) - if optional and is_fixed_width_rtype(typ): + if optional and typ.error_overlap: bitmap_arg_index += 1 def emit_call(self, not_implemented_handler: str = "") -> None: diff --git a/mypyc/ir/func_ir.py b/mypyc/ir/func_ir.py index dc83de24300a..933230a853a8 100644 --- a/mypyc/ir/func_ir.py +++ b/mypyc/ir/func_ir.py @@ -17,7 +17,7 @@ Register, Value, ) -from mypyc.ir.rtypes import RType, bitmap_rprimitive, deserialize_type, is_fixed_width_rtype +from mypyc.ir.rtypes import RType, bitmap_rprimitive, deserialize_type from mypyc.namegen import NameGenerator @@ -113,7 +113,7 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncSignature: def num_bitmap_args(args: tuple[RuntimeArg, ...]) -> int: n = 0 for arg in args: - if is_fixed_width_rtype(arg.type) and arg.kind.is_optional(): + if arg.type.error_overlap and arg.kind.is_optional(): n += 1 return (n + (BITMAP_BITS - 1)) // BITMAP_BITS diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 361221f5b710..1f79ba829d76 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -28,7 +28,6 @@ int_rprimitive, is_bit_rprimitive, is_bool_rprimitive, - is_fixed_width_rtype, is_int_rprimitive, is_none_rprimitive, is_pointer_rprimitive, @@ -632,7 +631,7 @@ def __init__(self, obj: Value, attr: str, line: int, *, borrow: bool = False) -> self.class_type = obj.type attr_type = obj.type.attr_type(attr) self.type = attr_type - if is_fixed_width_rtype(attr_type): + if attr_type.error_overlap: self.error_kind = ERR_MAGIC_OVERLAPPING self.is_borrowed = borrow and attr_type.is_refcounted @@ -785,7 +784,7 @@ class TupleGet(RegisterOp): error_kind = ERR_NEVER - def __init__(self, src: Value, index: int, line: int) -> None: + def __init__(self, src: Value, index: int, line: int = -1) -> None: super().__init__(line) self.src = src self.index = index diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py index 6db3f249ca9b..7fe8a940e4c2 100644 --- a/mypyc/ir/rtypes.py +++ b/mypyc/ir/rtypes.py @@ -572,6 +572,7 @@ def __init__(self, types: list[RType]) -> None: # Nominally the max c length is 31 chars, but I'm not honestly worried about this. 
self.struct_name = f"tuple_{self.unique_id}" self._ctype = f"{self.struct_name}" + self.error_overlap = all(t.error_overlap for t in self.types) and bool(self.types) def accept(self, visitor: RTypeVisitor[T]) -> T: return visitor.visit_rtuple(self) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 443fa6886ea6..6310c25c64fb 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -90,7 +90,6 @@ c_pyssize_t_rprimitive, dict_rprimitive, int_rprimitive, - is_fixed_width_rtype, is_list_rprimitive, is_none_rprimitive, is_object_rprimitive, @@ -1308,7 +1307,7 @@ def get_default() -> Value: assert isinstance(target, AssignmentTargetRegister) reg = target.register - if not is_fixed_width_rtype(reg.type): + if not reg.type.error_overlap: builder.assign_if_null(target.register, get_default, arg.initializer.line) else: builder.assign_if_bitmap_unset( diff --git a/mypyc/irbuild/env_class.py b/mypyc/irbuild/env_class.py index 416fba633482..ded8072deb63 100644 --- a/mypyc/irbuild/env_class.py +++ b/mypyc/irbuild/env_class.py @@ -21,7 +21,7 @@ def g() -> int: from mypyc.common import BITMAP_BITS, ENV_ATTR_NAME, SELF_NAME, bitmap_name from mypyc.ir.class_ir import ClassIR from mypyc.ir.ops import Call, GetAttr, SetAttr, Value -from mypyc.ir.rtypes import RInstance, bitmap_rprimitive, is_fixed_width_rtype, object_rprimitive +from mypyc.ir.rtypes import RInstance, bitmap_rprimitive, object_rprimitive from mypyc.irbuild.builder import IRBuilder, SymbolTarget from mypyc.irbuild.context import FuncInfo, GeneratorClass, ImplicitClass from mypyc.irbuild.targets import AssignmentTargetAttr @@ -163,7 +163,7 @@ def num_bitmap_args(builder: IRBuilder, args: list[Argument]) -> int: n = 0 for arg in args: t = builder.type_to_rtype(arg.variable.type) - if is_fixed_width_rtype(t) and arg.kind.is_optional(): + if t.error_overlap and arg.kind.is_optional(): n += 1 return (n + (BITMAP_BITS - 1)) // BITMAP_BITS diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index fe0af5b13a73..88b35a95c08c 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -1034,7 +1034,7 @@ def native_args_to_positional( bitmap = 0 c = 0 for lst, arg in zip(formal_to_actual, sig_args): - if arg.kind.is_optional() and is_fixed_width_rtype(arg.type): + if arg.kind.is_optional() and arg.type.error_overlap: if i * BITMAP_BITS <= c < (i + 1) * BITMAP_BITS: if lst: bitmap |= 1 << (c & (BITMAP_BITS - 1)) diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index ee0a09760ab1..893b3f808f24 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -395,7 +395,7 @@ def test_for_loop() -> None: assert sum([x * x for x in range(i64(4 + int()))]) == 1 + 4 + 9 [case testI64ErrorValuesAndUndefined] -from typing import Any +from typing import Any, Tuple import sys from mypy_extensions import mypyc_attr @@ -430,6 +430,33 @@ def test_method_error_value() -> None: with assertRaises(ValueError): C().maybe_raise(0, True) +def maybe_raise_tuple(n: i64, error: bool) -> Tuple[i64, i64]: + if error: + raise ValueError() + return n, n+ 1 + +def test_tuple_error_value() -> None: + for i in range(-1000, 1000): + assert maybe_raise_tuple(i, False) == (i, i + 1) + with assertRaises(ValueError): + maybe_raise_tuple(0, True) + f: Any = maybe_raise_tuple + for i in range(-1000, 1000): + assert f(i, False) == (i, i + 1) + with assertRaises(ValueError): + f(0, True) + +def maybe_raise_tuple2(n: i64, error: bool) -> Tuple[i64, int]: + if error: + raise ValueError() 
+ return n, n+ 1 + +def test_tuple_error_value_2() -> None: + for i in range(-1000, 1000): + assert maybe_raise_tuple2(i, False) == (i, i + 1) + with assertRaises(ValueError): + maybe_raise_tuple(0, True) + def test_unbox_int() -> None: for i in list(range(-1000, 1000)) + [-(1 << 63), (1 << 63) - 1]: o: Any = i @@ -733,8 +760,34 @@ def test_del() -> None: with assertRaises(AttributeError): o.x +class UndefinedTuple: + def __init__(self, x: i64, y: i64) -> None: + if x != 0: + self.t = (x, y) + +def test_undefined_native_int_tuple() -> None: + o = UndefinedTuple(MAGIC, MAGIC) + assert o.t[0] == MAGIC + assert o.t[1] == MAGIC + o = UndefinedTuple(0, 0) + with assertRaises(AttributeError): + o.t + o = UndefinedTuple(-13, 45) + assert o.t == (-13, 45) + +def test_undefined_native_int_tuple_via_any() -> None: + cls: Any = UndefinedTuple + o: Any = cls(MAGIC, MAGIC) + assert o.t[0] == MAGIC + assert o.t[1] == MAGIC + o = cls(0, 0) + with assertRaises(AttributeError): + o.t + o = UndefinedTuple(-13, 45) + assert o.t == (-13, 45) + [case testI64DefaultArgValues] -from typing import Any, Iterator +from typing import Any, Iterator, Tuple from typing_extensions import Final MAGIC: Final = -113 @@ -893,6 +946,31 @@ def test_kw_only_default_args() -> None: assert kw_only2(a=2, c=4) == 12 assert kw_only2(c=4, a=2) == 12 +def tuples(t: Tuple[i64, i64] = (MAGIC, MAGIC)) -> i64: + return t[0] + t[1] + +def test_tuple_arg_defaults() -> None: + assert tuples() == 2 * MAGIC + assert tuples((1, 2)) == 3 + assert tuples((MAGIC, MAGIC)) == 2 * MAGIC + tuples2: Any = tuples + assert tuples2() == 2 * MAGIC + assert tuples2((1, 2)) == 3 + assert tuples2((MAGIC, MAGIC)) == 2 * MAGIC + +class TupleInit: + def __init__(self, t: Tuple[i64, i64] = (MAGIC, MAGIC)) -> None: + self.t = t[0] + t[1] + +def test_tuple_init_arg_defaults() -> None: + assert TupleInit().t == 2 * MAGIC + assert TupleInit((1, 2)).t == 3 + assert TupleInit((MAGIC, MAGIC)).t == 2 * MAGIC + o: Any = TupleInit + assert o().t == 2 * MAGIC + assert o((1, 2)).t == 3 + assert o((MAGIC, MAGIC)).t == 2 * MAGIC + def many_args( a1: i64 = 0, a2: i64 = 1, @@ -1060,6 +1138,24 @@ def test_assign_error_value_conditionally() -> None: assert y == MAGIC assert z == MAGIC +def tuple_case(x: i64, y: i64) -> None: + if not int(): + t = (x, y) + assert t == (x, y) + if int(): + t2 = (x, y) + try: + print(t2) + except NameError as e: + assert str(e) == 'local variable "t2" referenced before assignment' + else: + assert False + +def test_conditionally_undefined_tuple() -> None: + tuple_case(2, 3) + tuple_case(-2, -3) + tuple_case(MAGIC, MAGIC) + def test_many_locals() -> None: x = int() if x: diff --git a/mypyc/transform/exceptions.py b/mypyc/transform/exceptions.py index cc638142c397..2851955ff38f 100644 --- a/mypyc/transform/exceptions.py +++ b/mypyc/transform/exceptions.py @@ -26,12 +26,14 @@ GetAttr, Integer, LoadErrorValue, + Op, RegisterOp, Return, SetAttr, + TupleGet, Value, ) -from mypyc.ir.rtypes import bool_rprimitive +from mypyc.ir.rtypes import RTuple, bool_rprimitive from mypyc.primitives.exc_ops import err_occurred_op from mypyc.primitives.registry import CFunctionDescription @@ -100,9 +102,7 @@ def split_blocks_at_errors( # semantics, using a temporary bool with value false target = Integer(0, bool_rprimitive) elif op.error_kind == ERR_MAGIC_OVERLAPPING: - errvalue = Integer(int(target.type.c_undefined), rtype=op.type) - comp = ComparisonOp(target, errvalue, ComparisonOp.EQ) - cur_block.ops.append(comp) + comp = 
insert_overlapping_error_value_check(cur_block.ops, target) new_block2 = BasicBlock() new_blocks.append(new_block2) branch = Branch( @@ -163,3 +163,17 @@ def adjust_error_kinds(block: BasicBlock) -> None: if isinstance(op, SetAttr): if op.class_type.class_ir.is_always_defined(op.attr): op.error_kind = ERR_NEVER + + +def insert_overlapping_error_value_check(ops: list[Op], target: Value) -> ComparisonOp: + """Append to ops to check for an overlapping error value.""" + typ = target.type + if isinstance(typ, RTuple): + item = TupleGet(target, 0) + ops.append(item) + return insert_overlapping_error_value_check(ops, item) + else: + errvalue = Integer(int(typ.c_undefined), rtype=typ) + op = ComparisonOp(target, errvalue, ComparisonOp.EQ) + ops.append(op) + return op diff --git a/mypyc/transform/uninit.py b/mypyc/transform/uninit.py index 041dd2545dff..6bf71ac4a8bc 100644 --- a/mypyc/transform/uninit.py +++ b/mypyc/transform/uninit.py @@ -20,7 +20,7 @@ Unreachable, Value, ) -from mypyc.ir.rtypes import bitmap_rprimitive, is_fixed_width_rtype +from mypyc.ir.rtypes import bitmap_rprimitive def insert_uninit_checks(ir: FuncIR) -> None: @@ -77,7 +77,7 @@ def split_blocks_at_uninits( init_registers.append(src) init_registers_set.add(src) - if not is_fixed_width_rtype(src.type): + if not src.type.error_overlap: cur_block.ops.append( Branch( src, From 7da2abf9a5a05fffc3d07caef31681c50bc9d6e7 Mon Sep 17 00:00:00 2001 From: jhance Date: Tue, 6 Dec 2022 10:08:06 -0800 Subject: [PATCH 109/292] Replace TypeList in constraints with TupleType (#14257) Now that the fallback is available, we can construct TupleTypes instead of TypeLists which will simplify constraint solving as it won't need to know to match TupleTypes with TypeLists. --- mypy/constraints.py | 22 ++++++++++++++++------ mypy/expandtype.py | 7 +++---- mypy/test/testconstraints.py | 16 ++++++++++++---- 3 files changed, 31 insertions(+), 14 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 6efb9997d36f..3a6553a307fd 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -29,7 +29,6 @@ Type, TypeAliasType, TypedDictType, - TypeList, TypeOfAny, TypeQuery, TypeType, @@ -135,7 +134,13 @@ def infer_constraints_for_callable( unpacked_type = get_proper_type(unpack_type.type) if isinstance(unpacked_type, TypeVarTupleType): - constraints.append(Constraint(unpacked_type, SUPERTYPE_OF, TypeList(actual_types))) + constraints.append( + Constraint( + unpacked_type, + SUPERTYPE_OF, + TupleType(actual_types, unpacked_type.tuple_fallback), + ) + ) elif isinstance(unpacked_type, TupleType): # Prefixes get converted to positional args, so technically the only case we # should have here is like Tuple[Unpack[Ts], Y1, Y2, Y3]. 
If this turns out @@ -147,12 +152,13 @@ def infer_constraints_for_callable( suffix_len = len(unpacked_type.items) - 1 constraints.append( Constraint( - inner_unpacked_type, SUPERTYPE_OF, TypeList(actual_types[:-suffix_len]) + inner_unpacked_type, + SUPERTYPE_OF, + TupleType(actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback), ) ) else: assert False, "mypy bug: unhandled constraint inference case" - else: for actual in actuals: actual_arg_type = arg_types[actual] @@ -640,7 +646,9 @@ def visit_instance(self, template: Instance) -> list[Constraint]: if isinstance(instance_unpack, TypeVarTupleType): res.append( Constraint( - instance_unpack, SUBTYPE_OF, TypeList(list(mapped_middle)) + instance_unpack, + SUBTYPE_OF, + TupleType(list(mapped_middle), instance_unpack.tuple_fallback), ) ) elif ( @@ -742,7 +750,9 @@ def visit_instance(self, template: Instance) -> list[Constraint]: if isinstance(template_unpack, TypeVarTupleType): res.append( Constraint( - template_unpack, SUPERTYPE_OF, TypeList(list(mapped_middle)) + template_unpack, + SUPERTYPE_OF, + TupleType(list(mapped_middle), template_unpack.tuple_fallback), ) ) elif ( diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 96d556121fd4..1458fb74ce94 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -22,7 +22,6 @@ Type, TypeAliasType, TypedDictType, - TypeList, TypeType, TypeVarId, TypeVarLikeType, @@ -95,7 +94,9 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type: instance.type.type_var_tuple_prefix, instance.type.type_var_tuple_suffix, ) - variables = {tvars_middle[0].id: TypeList(list(args_middle))} + tvar = tvars_middle[0] + assert isinstance(tvar, TypeVarTupleType) + variables = {tvar.id: TupleType(list(args_middle), tvar.tuple_fallback)} instance_args = args_prefix + args_suffix tvars = tvars_prefix + tvars_suffix else: @@ -447,8 +448,6 @@ def expand_unpack_with_variables( repl = get_proper_type(variables.get(t.type.id, t)) if isinstance(repl, TupleType): return repl.items - if isinstance(repl, TypeList): - return repl.items elif isinstance(repl, Instance) and repl.type.fullname == "builtins.tuple": return repl elif isinstance(repl, AnyType): diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index 6b8f596dd605..fc6960e0d8a0 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -5,7 +5,7 @@ from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints from mypy.test.helpers import Suite from mypy.test.typefixture import TypeFixture -from mypy.types import Instance, TupleType, TypeList, UnpackType +from mypy.types import Instance, TupleType, UnpackType class ConstraintsSuite(Suite): @@ -27,13 +27,19 @@ def test_basic_type_var_tuple_subtype(self) -> None: fx = self.fx assert infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUBTYPE_OF - ) == [Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TypeList([fx.a, fx.b]))] + ) == [ + Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)) + ] def test_basic_type_var_tuple(self) -> None: fx = self.fx assert infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUPERTYPE_OF - ) == [Constraint(type_var=fx.ts, op=SUPERTYPE_OF, target=TypeList([fx.a, fx.b]))] + ) == [ + Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple) + ) + ] def test_type_var_tuple_with_prefix_and_suffix(self) -> None: fx = self.fx @@ -45,7 +51,9 @@ def 
test_type_var_tuple_with_prefix_and_suffix(self) -> None: ) ) == { Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), - Constraint(type_var=fx.ts, op=SUPERTYPE_OF, target=TypeList([fx.b, fx.c])), + Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple) + ), Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.d), } From 695ea3017fee084c9d2ec17d9b28f8af905e3b63 Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Thu, 8 Dec 2022 09:16:30 +1000 Subject: [PATCH 110/292] =?UTF-8?q?(=F0=9F=8E=81)=20canwecolor=F0=9F=9A=80?= =?UTF-8?q?=20(#14051)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Last time i checked it wasn't the 1960's. So I think the CI could be colorized. Configured pytest, tox, mypy(#7771) and pip¹ (I already already colorized black and isort when I initially added them) 1: Pip doesn't work yet https://github.com/pypa/pip/issues/10909, so this is just a placedholder for when it (hopefully) soon will. Co-authored-by: KotlinIsland --- .github/workflows/test.yml | 10 ++++++++++ tox.ini | 4 ++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3fdb83ff15b4..9cc2d82ad911 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -102,6 +102,16 @@ jobs: name: ${{ matrix.name }} env: TOX_SKIP_MISSING_INTERPRETERS: False + # Rich (pip) + FORCE_COLOR: 1 + # Tox + PY_COLORS: 1 + # Mypy (see https://github.com/python/mypy/issues/7771) + TERM: xterm-color + MYPY_FORCE_COLOR: 1 + MYPY_FORCE_TERMINAL_WIDTH: 200 + # Pytest + PYTEST_ADDOPTS: --color=yes steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 diff --git a/tox.ini b/tox.ini index 92810bed9981..a155ec726386 100644 --- a/tox.ini +++ b/tox.ini @@ -2,7 +2,6 @@ minversion = 3.8.0 skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True} envlist = - py36, py37, py38, py39, @@ -14,7 +13,7 @@ isolated_build = true [testenv] description = run the test driver with {basepython} -passenv = PYTEST_XDIST_WORKER_COUNT PROGRAMDATA PROGRAMFILES(X86) +passenv = PYTEST_XDIST_WORKER_COUNT PROGRAMDATA PROGRAMFILES(X86) PYTEST_ADDOPTS deps = -rtest-requirements.txt commands = python -m pytest {posargs} @@ -27,6 +26,7 @@ commands = [testenv:type] description = type check ourselves +passenv = TERM MYPY_FORCE_COLOR MYPY_FORCE_TERMINAL_WIDTH commands = python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc python -m mypy --config-file mypy_self_check.ini misc --exclude misc/fix_annotate.py --exclude misc/async_matrix.py --exclude misc/sync-typeshed.py From 7849b8f15dccf69173b4ff172524c4d82a70bc2d Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sat, 10 Dec 2022 16:37:15 +0100 Subject: [PATCH 111/292] Rename `partially-defined` error codes (#14267) Rename `partially-defined` to `possibly-undefined` and `use-before-def` to `used-before-def`. 
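Both codes stay opt-in and behave as before; only the names change. A minimal sketch in the spirit of the updated test cases, run with `--enable-error-code possibly-undefined --enable-error-code used-before-def`:

```python
def f(flag: bool) -> None:
    if flag:
        a = 1
    x = a  # error: Name "a" may be undefined  [possibly-undefined]

def g() -> None:
    y = b  # error: Name "b" is used before definition  [used-before-def]
    b = 2
```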
Ref #14226 --- mypy/build.py | 14 +- mypy/errorcodes.py | 8 +- mypy/messages.py | 4 +- mypy/partially_defined.py | 20 +-- mypy/server/update.py | 2 +- ...ned.test => check-possibly-undefined.test} | 126 +++++++++--------- test-data/unit/check-python310.test | 8 +- test-data/unit/check-python38.test | 4 +- 8 files changed, 93 insertions(+), 93 deletions(-) rename test-data/unit/{check-partially-defined.test => check-possibly-undefined.test} (77%) diff --git a/mypy/build.py b/mypy/build.py index b85b49483739..7da3e71ce25e 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -50,7 +50,7 @@ from mypy.indirection import TypeIndirectionVisitor from mypy.messages import MessageBuilder from mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable, TypeInfo -from mypy.partially_defined import PartiallyDefinedVariableVisitor +from mypy.partially_defined import PossiblyUndefinedVariableVisitor from mypy.semanal import SemanticAnalyzer from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis from mypy.util import ( @@ -2347,18 +2347,18 @@ def type_check_second_pass(self) -> bool: self.time_spent_us += time_spent_us(t0) return result - def detect_partially_defined_vars(self, type_map: dict[Expression, Type]) -> None: + def detect_possibly_undefined_vars(self, type_map: dict[Expression, Type]) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" if self.tree.is_stub: # We skip stub files because they aren't actually executed. return manager = self.manager if manager.errors.is_error_code_enabled( - codes.PARTIALLY_DEFINED - ) or manager.errors.is_error_code_enabled(codes.USE_BEFORE_DEF): + codes.POSSIBLY_UNDEFINED + ) or manager.errors.is_error_code_enabled(codes.USED_BEFORE_DEF): manager.errors.set_file(self.xpath, self.tree.fullname, options=manager.options) self.tree.accept( - PartiallyDefinedVariableVisitor( + PossiblyUndefinedVariableVisitor( MessageBuilder(manager.errors, manager.modules), type_map, manager.options ) ) @@ -3412,7 +3412,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No graph[id].type_check_first_pass() if not graph[id].type_checker().deferred_nodes: unfinished_modules.discard(id) - graph[id].detect_partially_defined_vars(graph[id].type_map()) + graph[id].detect_possibly_undefined_vars(graph[id].type_map()) graph[id].finish_passes() while unfinished_modules: @@ -3421,7 +3421,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No continue if not graph[id].type_check_second_pass(): unfinished_modules.discard(id) - graph[id].detect_partially_defined_vars(graph[id].type_map()) + graph[id].detect_possibly_undefined_vars(graph[id].type_map()) graph[id].finish_passes() for id in stale: graph[id].generate_unused_ignore_notes() diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 1c15407a955b..6b266cc7b429 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -140,8 +140,8 @@ def __str__(self) -> str: ANNOTATION_UNCHECKED = ErrorCode( "annotation-unchecked", "Notify about type annotations in unchecked functions", "General" ) -PARTIALLY_DEFINED: Final[ErrorCode] = ErrorCode( - "partially-defined", +POSSIBLY_UNDEFINED: Final[ErrorCode] = ErrorCode( + "possibly-undefined", "Warn about variables that are defined only in some execution paths", "General", default_enabled=False, @@ -192,8 +192,8 @@ def __str__(self) -> str: "General", default_enabled=False, ) -USE_BEFORE_DEF: Final[ErrorCode] = ErrorCode( - "use-before-def", +USED_BEFORE_DEF: 
Final[ErrorCode] = ErrorCode( + "used-before-def", "Warn about variables that are used before they are defined", "General", default_enabled=False, diff --git a/mypy/messages.py b/mypy/messages.py index 85fa30512534..85811561e176 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1229,10 +1229,10 @@ def undefined_in_superclass(self, member: str, context: Context) -> None: self.fail(f'"{member}" undefined in superclass', context) def variable_may_be_undefined(self, name: str, context: Context) -> None: - self.fail(f'Name "{name}" may be undefined', context, code=codes.PARTIALLY_DEFINED) + self.fail(f'Name "{name}" may be undefined', context, code=codes.POSSIBLY_UNDEFINED) def var_used_before_def(self, name: str, context: Context) -> None: - self.fail(f'Name "{name}" is used before definition', context, code=codes.USE_BEFORE_DEF) + self.fail(f'Name "{name}" is used before definition', context, code=codes.USED_BEFORE_DEF) def first_argument_for_super_must_be_type(self, actual: Type, context: Context) -> None: actual = get_proper_type(actual) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 3ec8db3665cd..5f5253515b61 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -104,7 +104,7 @@ def skip_branch(self) -> None: assert len(self.branches) > 0 self.branches[-1].skipped = True - def is_partially_defined(self, name: str) -> bool: + def is_possibly_undefined(self, name: str) -> bool: assert len(self.branches) > 0 return name in self.branches[-1].may_be_defined @@ -213,10 +213,10 @@ def pop_undefined_ref(self, name: str) -> set[NameExpr]: assert len(self.scopes) > 0 return self._scope().pop_undefined_ref(name) - def is_partially_defined(self, name: str) -> bool: + def is_possibly_undefined(self, name: str) -> bool: assert len(self._scope().branch_stmts) > 0 # A variable is undefined if it's in a set of `may_be_defined` but not in `must_be_defined`. - return self._scope().branch_stmts[-1].is_partially_defined(name) + return self._scope().branch_stmts[-1].is_possibly_undefined(name) def is_defined_in_different_branch(self, name: str) -> bool: """This will return true if a variable is defined in a branch that's not the current branch.""" @@ -243,7 +243,7 @@ def __init__(self) -> None: self.has_break = False -class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): +class PossiblyUndefinedVariableVisitor(ExtendedTraverserVisitor): """Detects the following cases: - A variable that's defined only part of the time. - If a variable is used before definition @@ -253,7 +253,7 @@ class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): x = 1 print(x) # Error: "x" may be undefined. - Example of a use before definition: + Example of a used before definition: x = y y: int = 2 @@ -273,15 +273,15 @@ def __init__( self.tracker.record_definition(name) def var_used_before_def(self, name: str, context: Context) -> None: - if self.msg.errors.is_error_code_enabled(errorcodes.USE_BEFORE_DEF): + if self.msg.errors.is_error_code_enabled(errorcodes.USED_BEFORE_DEF): self.msg.var_used_before_def(name, context) def variable_may_be_undefined(self, name: str, context: Context) -> None: - if self.msg.errors.is_error_code_enabled(errorcodes.PARTIALLY_DEFINED): + if self.msg.errors.is_error_code_enabled(errorcodes.POSSIBLY_UNDEFINED): self.msg.variable_may_be_undefined(name, context) def process_definition(self, name: str) -> None: - # Was this name previously used? If yes, it's a use-before-definition error. + # Was this name previously used? 
If yes, it's a used-before-definition error. refs = self.tracker.pop_undefined_ref(name) for ref in refs: self.var_used_before_def(name, ref) @@ -471,7 +471,7 @@ def visit_starred_pattern(self, o: StarredPattern) -> None: def visit_name_expr(self, o: NameExpr) -> None: if refers_to_builtin(o): return - if self.tracker.is_partially_defined(o.name): + if self.tracker.is_possibly_undefined(o.name): # A variable is only defined in some branches. self.variable_may_be_undefined(o.name, o) # We don't want to report the error on the same variable multiple times. @@ -488,7 +488,7 @@ def visit_name_expr(self, o: NameExpr) -> None: # 2. The variable is defined later in the code. # Case (1) will be caught by semantic analyzer. Case (2) is a forward ref that should # be caught by this visitor. Save the ref for later, so that if we see a definition, - # we know it's a use-before-definition scenario. + # we know it's a used-before-definition scenario. self.tracker.record_undefined_ref(o) super().visit_name_expr(o) diff --git a/mypy/server/update.py b/mypy/server/update.py index a1f57b5a6746..e9750dec1e2a 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -662,7 +662,7 @@ def restore(ids: list[str]) -> None: state.type_checker().reset() state.type_check_first_pass() state.type_check_second_pass() - state.detect_partially_defined_vars(state.type_map()) + state.detect_possibly_undefined_vars(state.type_map()) t2 = time.time() state.finish_passes() t3 = time.time() diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-possibly-undefined.test similarity index 77% rename from test-data/unit/check-partially-defined.test rename to test-data/unit/check-possibly-undefined.test index 623e897e865d..d99943572a38 100644 --- a/test-data/unit/check-partially-defined.test +++ b/test-data/unit/check-possibly-undefined.test @@ -1,5 +1,5 @@ [case testDefinedInOneBranch] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): a = 1 else: @@ -8,7 +8,7 @@ z = a + 1 # E: Name "a" may be undefined z = a + 1 # We only report the error on first occurrence. [case testElif] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): a = 1 elif int(): @@ -19,14 +19,14 @@ else: z = a + 1 # E: Name "a" may be undefined [case testUsedInIf] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): y = 1 if int(): x = y # E: Name "y" may be undefined [case testDefinedInAllBranches] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): a = 1 elif int(): @@ -36,13 +36,13 @@ else: z = a + 1 [case testOmittedElse] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): a = 1 z = a + 1 # E: Name "a" may be undefined [case testUpdatedInIf] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined # Variable a is already defined. Just updating it in an "if" is acceptable. 
a = 1 if int(): @@ -50,7 +50,7 @@ if int(): z = a + 1 [case testNestedIf] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): if int(): a = 1 @@ -65,7 +65,7 @@ else: z = a + b # E: Name "a" may be undefined [case testVeryNestedIf] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): if int(): if int(): @@ -81,7 +81,7 @@ else: z = a + b # E: Name "a" may be undefined [case testTupleUnpack] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): (x, y) = (1, 2) @@ -91,7 +91,7 @@ a = y + x # E: Name "x" may be undefined a = y + z # E: Name "z" may be undefined [case testIndexExpr] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): *x, y = (1, 2) @@ -101,7 +101,7 @@ a = x # No error. b = y # E: Name "y" may be undefined [case testRedefined] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined y = 3 if int(): if int(): @@ -115,7 +115,7 @@ else: x = y + 2 [case testFunction] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f0() -> None: if int(): def some_func() -> None: @@ -134,21 +134,21 @@ def f1() -> None: some_func() # No error. [case testLambda] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f0(b: bool) -> None: if b: fn = lambda: 2 y = fn # E: Name "fn" may be undefined -[case testUseBeforeDefClass] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +[case testUsedBeforeDefClass] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f(x: A): # No error here. 
pass y = A() # E: Name "A" is used before definition class A: pass [case testClassScope] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def class C: x = 0 def f0(self) -> None: pass @@ -163,26 +163,26 @@ y = x # E: Name "x" is used before definition x = 1 [case testClassInsideFunction] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f() -> None: class C: pass c = C() # E: Name "C" is used before definition class C: pass -[case testUseBeforeDefFunc] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +[case testUsedBeforeDefFunc] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def foo() # E: Name "foo" is used before definition def foo(): pass [case testGenerator] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined if int(): a = 3 s = [a + 1 for a in [1, 2, 3]] x = a # E: Name "a" may be undefined [case testScope] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def foo() -> None: if int(): y = 2 @@ -192,7 +192,7 @@ if int(): x = y # E: Name "y" may be undefined [case testVarDefinedInOuterScopeUpdated] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f0() -> None: global x y = x @@ -201,7 +201,7 @@ def f0() -> None: x = 2 [case testNonlocalVar] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f0() -> None: x = 2 @@ -212,7 +212,7 @@ def f0() -> None: [case testGlobalDeclarationAfterUsage] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f0() -> None: y = x # E: Name "x" is used before definition global x @@ -220,7 +220,7 @@ def f0() -> None: x = 2 [case testVarDefinedInOuterScope] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f0() -> None: global x y = x # We do not detect such errors right now. 
@@ -228,21 +228,21 @@ def f0() -> None: f0() x = 1 [case testDefinedInOuterScopeNoError] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def foo() -> None: bar() def bar() -> None: foo() [case testFuncParams] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def foo(a: int) -> None: if int(): a = 2 x = a [case testWhile] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined while int(): a = 1 @@ -289,7 +289,7 @@ y = f # E: Name "f" may be undefined y = g [case testForLoop] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined for x in [1, 2, 3]: if x: x = 1 @@ -300,7 +300,7 @@ else: a = z + y # E: Name "y" may be undefined [case testReturn] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f1() -> int: if int(): x = 1 @@ -351,8 +351,8 @@ def f6() -> int: return x return x # E: Name "x" may be undefined -[case testDefinedDifferentBranchUseBeforeDef] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +[case testDefinedDifferentBranchUsedBeforeDef] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f0() -> None: if int(): @@ -382,8 +382,8 @@ def f2() -> None: x = 2 w = x # No error. -[case testDefinedDifferentBranchPartiallyDefined] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +[case testDefinedDifferentBranchPossiblyUndefined] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f0() -> None: first_iter = True @@ -424,7 +424,7 @@ def f3() -> None: y = x # E: Name "x" may be undefined [case testAssert] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f1() -> int: if int(): x = 1 @@ -442,7 +442,7 @@ def f2() -> int: return x # E: Name "x" may be undefined [case testRaise] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f1() -> int: if int(): x = 1 @@ -461,7 +461,7 @@ def f2() -> int: [builtins fixtures/exception.pyi] [case testContinue] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f1() -> int: while int(): if int(): @@ -495,7 +495,7 @@ def f3() -> None: y = x [case testBreak] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f1() -> None: while int(): if int(): @@ -526,7 +526,7 @@ def f3() -> None: z = x # E: Name "x" may be undefined [case testNoReturn] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined from typing import NoReturn def fail() -> NoReturn: @@ -545,7 +545,7 @@ def f() -> None: z = x [case testDictComprehension] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined def f() -> None: for _ in [1, 2]: @@ -562,7 +562,7 @@ def f() -> None: [builtins fixtures/dict.pyi] [case testWithStmt] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined from contextlib import contextmanager @contextmanager @@ -582,7 +582,7 @@ def f() -> None: [builtins fixtures/tuple.pyi] [case testUnreachable] -# flags: --enable-error-code partially-defined +# flags: --enable-error-code possibly-undefined import typing if 
typing.TYPE_CHECKING: @@ -600,8 +600,8 @@ else: a = z [typing fixtures/typing-medium.pyi] -[case testUseBeforeDef] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDef] +# flags: --enable-error-code used-before-def def f0() -> None: x = y # E: Name "y" is used before definition @@ -611,7 +611,7 @@ def f2() -> None: if int(): pass else: - # No use-before-def error. + # No used-before-def error. y = z # E: Name "z" is not defined def inner2() -> None: @@ -632,8 +632,8 @@ def f4() -> None: x = z # E: Name "z" is used before definition z: int = 2 -[case testUseBeforeDefImportsBasic] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDefImportsBasic] +# flags: --enable-error-code used-before-def import foo # type: ignore import x.y # type: ignore @@ -653,8 +653,8 @@ def f3() -> None: a = x.y # No error. x: int = 1 -[case testUseBeforeDefImportBasicRename] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDefImportBasicRename] +# flags: --enable-error-code used-before-def import x.y as z # type: ignore from typing import Any @@ -674,16 +674,16 @@ def f3() -> None: a = y # E: Name "y" is used before definition y: int = 1 -[case testUseBeforeDefImportFrom] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDefImportFrom] +# flags: --enable-error-code used-before-def from foo import x # type: ignore def f0() -> None: a = x # No error. x: int = 1 -[case testUseBeforeDefImportFromRename] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDefImportFromRename] +# flags: --enable-error-code used-before-def from foo import x as y # type: ignore def f0() -> None: @@ -694,8 +694,8 @@ def f1() -> None: a = x # E: Name "x" is used before definition x: int = 1 -[case testUseBeforeDefFunctionDeclarations] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDefFunctionDeclarations] +# flags: --enable-error-code used-before-def def f0() -> None: def inner() -> None: @@ -704,30 +704,30 @@ def f0() -> None: inner() # No error. inner = lambda: None -[case testUseBeforeDefBuiltins] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDefBuiltins] +# flags: --enable-error-code used-before-def def f0() -> None: s = type(123) type = "abc" a = type -[case testUseBeforeDefImplicitModuleAttrs] -# flags: --enable-error-code use-before-def +[case testUsedBeforeDefImplicitModuleAttrs] +# flags: --enable-error-code used-before-def a = __name__ # No error. __name__ = "abc" [case testUntypedDef] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f(): if int(): x = 0 - z = y # No use-before-def error because def is untyped. - y = x # No partially-defined error because def is untyped. + z = y # No used-before-def error because def is untyped. + y = x # No possibly-undefined error because def is untyped. 
[case testUntypedDefCheckUntypedDefs] -# flags: --enable-error-code partially-defined --enable-error-code use-before-def --check-untyped-defs +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def --check-untyped-defs def f(): if int(): diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 1967e7f4810b..12fd2b43c80a 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1726,8 +1726,8 @@ def my_func(pairs: Iterable[tuple[S, S]]) -> None: # N: Revealed type is "Tuple[builtins.str, builtins.str]" [builtins fixtures/tuple.pyi] -[case testPartiallyDefinedMatch] -# flags: --enable-error-code partially-defined +[case testPossiblyUndefinedMatch] +# flags: --enable-error-code possibly-undefined def f0(x: int | str) -> int: match x: case int(): @@ -1789,8 +1789,8 @@ def f6(a: object) -> None: pass [builtins fixtures/tuple.pyi] -[case testPartiallyDefinedMatchUnreachable] -# flags: --enable-error-code partially-defined +[case testPossiblyUndefinedMatchUnreachable] +# flags: --enable-error-code possibly-undefined import typing def f0(x: int) -> int: diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index 30bdadf900c3..c8fb1eb5aac8 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -707,8 +707,8 @@ def foo(name: str, /, **kwargs: Unpack[Person]) -> None: # Allowed ... [builtins fixtures/dict.pyi] -[case testPartiallyDefinedWithAssignmentExpr] -# flags: --python-version 3.8 --enable-error-code partially-defined +[case testPossiblyUndefinedWithAssignmentExpr] +# flags: --python-version 3.8 --enable-error-code possibly-undefined def f1() -> None: d = {0: 1} if int(): From b0003aff931c12298fb462aa4cece18610ee4d05 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 11 Dec 2022 19:30:13 -0800 Subject: [PATCH 112/292] stubtest: catch BaseException on module imports (#14284) This came up in https://github.com/python/typeshed/pull/9347 --- mypy/stubtest.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 5e39b996076b..0f8df607858f 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -205,7 +205,9 @@ def test_module(module_name: str) -> Iterator[Error]: try: runtime = silent_import_module(module_name) - except Exception as e: + except KeyboardInterrupt: + raise + except BaseException as e: yield Error([module_name], f"failed to import, {type(e).__name__}: {e}", stub, MISSING) return @@ -1500,7 +1502,9 @@ def build_stubs(modules: list[str], options: Options, find_submodules: bool = Fa for m in pkgutil.walk_packages(runtime.__path__, runtime.__name__ + ".") if m.name not in all_modules ) - except Exception: + except KeyboardInterrupt: + raise + except BaseException: pass if sources: From 42dc1c431ed252de41f96d65367623ebac9e1bb8 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Mon, 12 Dec 2022 11:06:02 +0200 Subject: [PATCH 113/292] Replace unsafe PY_*_VERSION comparisons to fix for Python 4.0 (#14280) `#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7` For example, this is true for Python 3.7-3.11 but also true for Python 4.7-4.11. Instead, `PY_VERSION_HEX` should be used. https://docs.python.org/3.11/c-api/apiabiversion.html ```c /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. Use this for numeric comparisons, e.g. #if PY_VERSION_HEX >= ... 
*/ ``` https://github.com/python/cpython/blob/2e279e85fece187b6058718ac7e82d1692461e26/Include/patchlevel.h#L29-L30 Remove compatibility code that only applied to EOL and unsupported Python versions (<= 3.6) --- mypy/stubtest.py | 3 --- mypyc/common.py | 12 ++---------- mypyc/lib-rt/CPy.h | 9 ++------- mypyc/lib-rt/getargsfast.c | 5 ----- mypyc/lib-rt/pythonsupport.h | 29 +++-------------------------- 5 files changed, 7 insertions(+), 51 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 0f8df607858f..a7a72235fed1 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1533,9 +1533,6 @@ def get_typeshed_stdlib_modules( stdlib_py_versions = mypy.modulefinder.load_stdlib_py_versions(custom_typeshed_dir) if version_info is None: version_info = sys.version_info[0:2] - # Typeshed's minimum supported Python 3 is Python 3.7 - if sys.version_info < (3, 7): - version_info = (3, 7) def exists_in_version(module: str) -> bool: assert version_info is not None diff --git a/mypyc/common.py b/mypyc/common.py index 6b0bbcee5fc9..7412ebef4752 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -44,13 +44,6 @@ PLATFORM_SIZE = 4 if IS_32_BIT_PLATFORM else 8 -# Python 3.5 on macOS uses a hybrid 32/64-bit build that requires some workarounds. -# The same generated C will be compiled in both 32 and 64 bit modes when building mypy -# wheels (for an unknown reason). -# -# Note that we use "in ['darwin']" because of https://github.com/mypyc/mypyc/issues/761. -IS_MIXED_32_64_BIT_BUILD: Final = sys.platform in ["darwin"] and sys.version_info < (3, 6) - # Maximum value for a short tagged integer. MAX_SHORT_INT: Final = 2 ** (8 * int(SIZEOF_SIZE_T) - 2) - 1 @@ -59,9 +52,8 @@ # Maximum value for a short tagged integer represented as a C integer literal. # -# Note: Assume that the compiled code uses the same bit width as mypyc, except for -# Python 3.5 on macOS. -MAX_LITERAL_SHORT_INT: Final = MAX_SHORT_INT if not IS_MIXED_32_64_BIT_BUILD else 2**30 - 1 +# Note: Assume that the compiled code uses the same bit width as mypyc +MAX_LITERAL_SHORT_INT: Final = MAX_SHORT_INT MIN_LITERAL_SHORT_INT: Final = -MAX_LITERAL_SHORT_INT - 1 # Decription of the C type used to track the definedness of attributes and diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 166c851d0155..7ee914a037dc 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -499,13 +499,8 @@ static inline bool CPy_KeepPropagating(void) { } // We want to avoid the public PyErr_GetExcInfo API for these because // it requires a bunch of spurious refcount traffic on the parts of -// the triple we don't care about. Unfortunately the layout of the -// data structure changed in 3.7 so we need to handle that. -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7 +// the triple we don't care about. 
#define CPy_ExcState() PyThreadState_GET()->exc_info -#else -#define CPy_ExcState() PyThreadState_GET() -#endif void CPy_Raise(PyObject *exc); void CPy_Reraise(void); @@ -527,7 +522,7 @@ void CPy_AttributeError(const char *filename, const char *funcname, const char * // Misc operations -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 8 +#if PY_VERSION_HEX >= 0x03080000 #define CPy_TRASHCAN_BEGIN(op, dealloc) Py_TRASHCAN_BEGIN(op, dealloc) #define CPy_TRASHCAN_END(op) Py_TRASHCAN_END #else diff --git a/mypyc/lib-rt/getargsfast.c b/mypyc/lib-rt/getargsfast.c index afb161e643c7..387deed4399b 100644 --- a/mypyc/lib-rt/getargsfast.c +++ b/mypyc/lib-rt/getargsfast.c @@ -18,9 +18,6 @@ #include #include "CPy.h" -/* None of this is supported on Python 3.6 or earlier */ -#if PY_VERSION_HEX >= 0x03070000 - #define PARSER_INITED(parser) ((parser)->kwtuple != NULL) /* Forward */ @@ -570,5 +567,3 @@ skipitem_fast(const char **p_format, va_list *p_va) *p_format = format; } - -#endif diff --git a/mypyc/lib-rt/pythonsupport.h b/mypyc/lib-rt/pythonsupport.h index cd66c4cb4df8..8a1159a98853 100644 --- a/mypyc/lib-rt/pythonsupport.h +++ b/mypyc/lib-rt/pythonsupport.h @@ -22,7 +22,6 @@ extern "C" { ///////////////////////////////////////// // Adapted from bltinmodule.c in Python 3.7.0 -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7 _Py_IDENTIFIER(__mro_entries__); static PyObject* update_bases(PyObject *bases) @@ -96,16 +95,8 @@ update_bases(PyObject *bases) Py_XDECREF(new_bases); return NULL; } -#else -static PyObject* -update_bases(PyObject *bases) -{ - return bases; -} -#endif // From Python 3.7's typeobject.c -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 _Py_IDENTIFIER(__init_subclass__); static int init_subclass(PyTypeObject *type, PyObject *kwds) @@ -134,14 +125,6 @@ init_subclass(PyTypeObject *type, PyObject *kwds) return 0; } -#else -static int -init_subclass(PyTypeObject *type, PyObject *kwds) -{ - return 0; -} -#endif - // Adapted from longobject.c in Python 3.7.0 /* This function adapted from PyLong_AsLongLongAndOverflow, but with @@ -306,7 +289,7 @@ list_count(PyListObject *self, PyObject *value) return CPyTagged_ShortFromSsize_t(count); } -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 8 +#if PY_VERSION_HEX < 0x03080000 static PyObject * _PyDict_GetItemStringWithError(PyObject *v, const char *key) { @@ -321,13 +304,7 @@ _PyDict_GetItemStringWithError(PyObject *v, const char *key) } #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 6 -/* _PyUnicode_EqualToASCIIString got added in 3.5.3 (argh!) so we can't actually know - * whether it will be present at runtime, so we just assume we don't have it in 3.5. */ -#define CPyUnicode_EqualToASCIIString(x, y) (PyUnicode_CompareWithASCIIString((x), (y)) == 0) -#elif PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 #define CPyUnicode_EqualToASCIIString(x, y) _PyUnicode_EqualToASCIIString(x, y) -#endif // Adapted from genobject.c in Python 3.7.2 // Copied because it wasn't in 3.5.2 and it is undocumented anyways. 
@@ -390,7 +367,7 @@ _CPyDictView_New(PyObject *dict, PyTypeObject *type) } #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >=10 +#if PY_VERSION_HEX >= 0x030A0000 // 3.10 static int _CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { PyObject *tmp = NULL; @@ -404,7 +381,7 @@ _CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { #define _CPyObject_HasAttrId _PyObject_HasAttrId #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 9 +#if PY_VERSION_HEX < 0x03090000 // OneArgs and NoArgs functions got added in 3.9 #define _PyObject_CallMethodIdNoArgs(self, name) \ _PyObject_CallMethodIdObjArgs((self), (name), NULL) From 7eef68af8b91e4509ddf73eb37c22a1a2197039d Mon Sep 17 00:00:00 2001 From: Harrison McCarty Date: Tue, 13 Dec 2022 14:16:52 -0800 Subject: [PATCH 114/292] Added error for class attribute access with slot (#14125) Fixed #13103 Adds a check to class attribute access to ensure it isn't a defined slot. --- mypy/checkmember.py | 4 ++++ mypy/message_registry.py | 1 + test-data/unit/check-slots.test | 10 ++++++++++ 3 files changed, 15 insertions(+) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 554b49d3eda2..e3c7c95a64b9 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -912,6 +912,10 @@ def analyze_class_attribute_access( if isinstance(node.node, TypeInfo): mx.msg.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, mx.context) + # Refuse class attribute access if slot defined + if info.slots and name in info.slots: + mx.msg.fail(message_registry.CLASS_VAR_CONFLICTS_SLOTS.format(name), mx.context) + # If a final attribute was declared on `self` in `__init__`, then it # can't be accessed on the class object. if node.implicit and isinstance(node.node, Var) and node.node.is_final: diff --git a/mypy/message_registry.py b/mypy/message_registry.py index a067763d8d66..7827a2818be9 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -137,6 +137,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: MODULE_LEVEL_GETATTRIBUTE: Final = ErrorMessage( "__getattribute__ is not valid at the module level" ) +CLASS_VAR_CONFLICTS_SLOTS: Final = '"{}" in __slots__ conflicts with class variable access' NAME_NOT_IN_SLOTS: Final = ErrorMessage( 'Trying to assign name "{}" that is not in "__slots__" of type "{}"' ) diff --git a/test-data/unit/check-slots.test b/test-data/unit/check-slots.test index 96e4eba3c966..8beb0d8bf3f7 100644 --- a/test-data/unit/check-slots.test +++ b/test-data/unit/check-slots.test @@ -517,3 +517,13 @@ class A: self.b = 2 self.missing = 3 [builtins fixtures/tuple.pyi] + +[case testSlotsWithClassVar] +from typing import ClassVar +class X: + __slots__ = ('a',) + a: int +x = X() +X.a # E: "a" in __slots__ conflicts with class variable access +x.a +[builtins fixtures/tuple.pyi] From d62be28cc85efd759d808b983e1cf55242c2d71a Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Wed, 14 Dec 2022 19:42:33 +1000 Subject: [PATCH 115/292] Run against 3.12-dev in CI (#14050) --- .github/workflows/test.yml | 18 ++++++++++++++++++ mypyc/test/test_run.py | 3 +++ test-requirements.txt | 2 +- 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9cc2d82ad911..a02378cc01ab 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -137,3 +137,21 @@ jobs: run: tox -e ${{ matrix.toxenv }} --notest - name: Test run: tox -e ${{ matrix.toxenv }} --skip-pkg-install -- ${{ matrix.tox_extra_args }} + + python-nightly: 
+ runs-on: ubuntu-latest + name: Test suite with Python nightly + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.12-dev' + - name: Install tox + run: pip install --upgrade 'setuptools!=50' tox==3.24.5 + - name: Setup tox environment + run: tox -e py --notest + - name: Test + run: tox -e py --skip-pkg-install -- "-n 2" + continue-on-error: true + - name: Mark as a success + run: exit 0 diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index fff775ebfab5..c867c9d37dac 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -312,6 +312,9 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> stderr=subprocess.STDOUT, env=env, ) + if sys.version_info >= (3, 12): + # TODO: testDecorators1 hangs on 3.12, remove this once fixed + proc.wait(timeout=30) output = proc.communicate()[0].decode("utf8") outlines = output.splitlines() diff --git a/test-requirements.txt b/test-requirements.txt index 6f0c1b065ad4..8ae94237f5ea 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,7 +7,7 @@ flake8==5.0.4 # must match version in .pre-commit-config.yaml flake8-bugbear==22.9.23 # must match version in .pre-commit-config.yaml flake8-noqa==1.2.9 # must match version in .pre-commit-config.yaml isort[colors]==5.10.1 # must match version in .pre-commit-config.yaml -lxml>=4.9.1; python_version<'3.11' or sys_platform!='win32' +lxml>=4.9.1; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' psutil>=4.0 # pytest 6.2.3 does not support Python 3.10 pytest>=6.2.4 From 369525071a548c03ddea1c428207c8801e73192f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 14 Dec 2022 17:35:56 -0800 Subject: [PATCH 116/292] Sync typeshed (#14295) Source commit: https://github.com/python/typeshed/commit/9bddd3a3f1abfaf6335c2139a77ff1ff69eb4b54 --- mypy/typeshed/stdlib/_ast.pyi | 6 ++-- mypy/typeshed/stdlib/asyncio/runners.pyi | 9 +++++- mypy/typeshed/stdlib/email/message.pyi | 7 +++-- mypy/typeshed/stdlib/http/client.pyi | 8 +++++- .../stdlib/multiprocessing/context.pyi | 4 +-- mypy/typeshed/stdlib/multiprocessing/util.pyi | 4 +-- mypy/typeshed/stdlib/types.pyi | 2 +- mypy/typeshed/stdlib/typing.pyi | 7 +++-- mypy/typeshed/stdlib/unittest/case.pyi | 2 +- mypy/typeshed/stdlib/zipfile.pyi | 28 +++++++++++-------- 10 files changed, 51 insertions(+), 26 deletions(-) diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi index f723b7eff8bb..7bc47266d713 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -104,14 +104,14 @@ class Assign(stmt): class AugAssign(stmt): if sys.version_info >= (3, 10): __match_args__ = ("target", "op", "value") - target: expr + target: Name | Attribute | Subscript op: operator value: expr class AnnAssign(stmt): if sys.version_info >= (3, 10): __match_args__ = ("target", "annotation", "value", "simple") - target: expr + target: Name | Attribute | Subscript annotation: expr value: expr | None simple: int @@ -355,7 +355,7 @@ if sys.version_info >= (3, 8): class NamedExpr(expr): if sys.version_info >= (3, 10): __match_args__ = ("target", "value") - target: expr + target: Name value: expr class Attribute(expr): diff --git a/mypy/typeshed/stdlib/asyncio/runners.pyi b/mypy/typeshed/stdlib/asyncio/runners.pyi index 49d236bbee9e..74ed83ed8dc4 100644 --- a/mypy/typeshed/stdlib/asyncio/runners.pyi +++ b/mypy/typeshed/stdlib/asyncio/runners.pyi @@ -3,6 +3,7 @@ from 
_typeshed import Self from collections.abc import Callable, Coroutine from contextvars import Context from typing import Any, TypeVar +from typing_extensions import final from .events import AbstractEventLoop @@ -13,6 +14,7 @@ else: _T = TypeVar("_T") if sys.version_info >= (3, 11): + @final class Runner: def __init__(self, *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ...) -> None: ... def __enter__(self: Self) -> Self: ... @@ -21,7 +23,12 @@ if sys.version_info >= (3, 11): def get_loop(self) -> AbstractEventLoop: ... def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = ...) -> _T: ... -if sys.version_info >= (3, 8): +if sys.version_info >= (3, 12): + def run( + main: Coroutine[Any, Any, _T], *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ... + ) -> _T: ... + +elif sys.version_info >= (3, 8): def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = ...) -> _T: ... else: diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index 3c59aeeb2d01..c6b77cdde054 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -5,7 +5,7 @@ from email.charset import Charset from email.contentmanager import ContentManager from email.errors import MessageDefect from email.policy import Policy -from typing import Any, TypeVar +from typing import Any, TypeVar, overload from typing_extensions import TypeAlias __all__ = ["Message", "EmailMessage"] @@ -54,7 +54,10 @@ class Message: def get_filename(self, failobj: _T = ...) -> _T | str: ... def get_boundary(self, failobj: _T = ...) -> _T | str: ... def set_boundary(self, boundary: str) -> None: ... - def get_content_charset(self, failobj: _T = ...) -> _T | str: ... + @overload + def get_content_charset(self) -> str | None: ... + @overload + def get_content_charset(self, failobj: _T) -> str | _T: ... def get_charsets(self, failobj: _T = ...) -> _T | list[str]: ... def walk(self: Self) -> Generator[Self, None, None]: ... def get_content_disposition(self) -> str | None: ... diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index ad794ed9b073..53cefc0a33d1 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -154,7 +154,13 @@ class HTTPConnection: blocksize: int = ..., ) -> None: ... def request( - self, method: str, url: str, body: _DataType | None = ..., headers: Mapping[str, str] = ..., *, encode_chunked: bool = ... + self, + method: str, + url: str, + body: _DataType | str | None = ..., + headers: Mapping[str, str] = ..., + *, + encode_chunked: bool = ..., ) -> None: ... def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/context.pyi b/mypy/typeshed/stdlib/multiprocessing/context.pyi index f6380e2cfcbf..6622dca19ade 100644 --- a/mypy/typeshed/stdlib/multiprocessing/context.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/context.pyi @@ -2,7 +2,7 @@ import ctypes import sys from collections.abc import Callable, Iterable, Sequence from ctypes import _CData -from logging import Logger +from logging import Logger, _Level as _LoggingLevel from multiprocessing import popen_fork, popen_forkserver, popen_spawn_posix, popen_spawn_win32, queues, synchronize from multiprocessing.managers import SyncManager from multiprocessing.pool import Pool as _Pool @@ -107,7 +107,7 @@ class BaseContext: ) -> Any: ... 
def freeze_support(self) -> None: ... def get_logger(self) -> Logger: ... - def log_to_stderr(self, level: str | None = ...) -> Logger: ... + def log_to_stderr(self, level: _LoggingLevel | None = ...) -> Logger: ... def allow_connection_pickling(self) -> None: ... def set_executable(self, executable: str) -> None: ... def set_forkserver_preload(self, module_names: list[str]) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/util.pyi b/mypy/typeshed/stdlib/multiprocessing/util.pyi index 4b93b7a6a472..263781da9432 100644 --- a/mypy/typeshed/stdlib/multiprocessing/util.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/util.pyi @@ -1,7 +1,7 @@ import threading from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence -from logging import Logger +from logging import Logger, _Level as _LoggingLevel from typing import Any, SupportsInt from typing_extensions import SupportsIndex @@ -37,7 +37,7 @@ def debug(msg: object, *args: object) -> None: ... def info(msg: object, *args: object) -> None: ... def sub_warning(msg: object, *args: object) -> None: ... def get_logger() -> Logger: ... -def log_to_stderr(level: int | None = ...) -> Logger: ... +def log_to_stderr(level: _LoggingLevel | None = ...) -> Logger: ... def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... abstract_sockets_supported: bool diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 4047cf84593d..a40b6280f47c 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -569,7 +569,7 @@ _P = ParamSpec("_P") # it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable # The type: ignore is due to overlapping overloads, not the use of ParamSpec @overload -def coroutine(func: Callable[_P, Generator[_R, Any, Any]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] +def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] @overload def coroutine(func: _Fn) -> _Fn: ... diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index cc27ae7dbda2..71018003b6d9 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -325,7 +325,7 @@ class SupportsRound(Protocol[_T_co]): def __round__(self, __ndigits: int) -> _T_co: ... @runtime_checkable -class Sized(Protocol): +class Sized(Protocol, metaclass=ABCMeta): @abstractmethod def __len__(self) -> int: ... @@ -452,7 +452,10 @@ class Container(Protocol[_T_co]): def __contains__(self, __x: object) -> bool: ... @runtime_checkable -class Collection(Sized, Iterable[_T_co], Container[_T_co], Protocol[_T_co]): ... +class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): + # Implement Sized (but don't have it as a base class). + @abstractmethod + def __len__(self) -> int: ... class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]): @overload diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index c75539a97368..42633ed13bb8 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -104,7 +104,7 @@ class TestCase: def tearDownClass(cls) -> None: ... def run(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... 
- def skipTest(self, reason: Any) -> None: ... + def skipTest(self, reason: Any) -> NoReturn: ... def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... def debug(self) -> None: ... if sys.version_info < (3, 11): diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index e964cd6eda87..60134c915da7 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -29,6 +29,7 @@ _DateTuple: TypeAlias = tuple[int, int, int, int, int, int] _ReadWriteMode: TypeAlias = Literal["r", "w"] _ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"] _ZipFileMode: TypeAlias = Literal["r", "w", "x", "a"] +_CompressionMode: TypeAlias = Literal[0, 8, 12, 14] class BadZipFile(Exception): ... @@ -100,7 +101,7 @@ class ZipFile: fp: IO[bytes] | None NameToInfo: dict[str, ZipInfo] start_dir: int # undocumented - compression: int # undocumented + compression: _CompressionMode # undocumented compresslevel: int | None # undocumented mode: _ZipFileMode # undocumented pwd: bytes | None # undocumented @@ -110,7 +111,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: Literal["r"] = ..., - compression: int = ..., + compression: _CompressionMode = ..., allowZip64: bool = ..., compresslevel: int | None = ..., *, @@ -122,7 +123,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: _ZipFileMode = ..., - compression: int = ..., + compression: _CompressionMode = ..., allowZip64: bool = ..., compresslevel: int | None = ..., *, @@ -134,7 +135,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: _ZipFileMode = ..., - compression: int = ..., + compression: _CompressionMode = ..., allowZip64: bool = ..., compresslevel: int | None = ..., *, @@ -145,7 +146,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: _ZipFileMode = ..., - compression: int = ..., + compression: _CompressionMode = ..., allowZip64: bool = ..., compresslevel: int | None = ..., ) -> None: ... @@ -184,14 +185,19 @@ class ZipFile: class PyZipFile(ZipFile): def __init__( - self, file: str | IO[bytes], mode: _ZipFileMode = ..., compression: int = ..., allowZip64: bool = ..., optimize: int = ... + self, + file: str | IO[bytes], + mode: _ZipFileMode = ..., + compression: _CompressionMode = ..., + allowZip64: bool = ..., + optimize: int = ..., ) -> None: ... def writepy(self, pathname: str, basename: str = ..., filterfunc: Callable[[str], bool] | None = ...) -> None: ... class ZipInfo: filename: str date_time: _DateTuple - compress_type: int + compress_type: _CompressionMode comment: bytes extra: bytes create_system: int @@ -269,10 +275,10 @@ if sys.version_info >= (3, 8): def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: ... -ZIP_STORED: int -ZIP_DEFLATED: int +ZIP_STORED: Literal[0] +ZIP_DEFLATED: Literal[8] ZIP64_LIMIT: int ZIP_FILECOUNT_LIMIT: int ZIP_MAX_COMMENT: int -ZIP_BZIP2: int -ZIP_LZMA: int +ZIP_BZIP2: Literal[12] +ZIP_LZMA: Literal[14] From df6e828198205e91e88b2f202e72441c531d5227 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 15 Dec 2022 10:18:08 +0000 Subject: [PATCH 117/292] Constant fold initializers of final variables (#14283) Now mypy can figure out the values of final variables even if the initializer has some operations on constant values: ``` A: Final = 2 # This has always worked A: Final = -(1 << 2) # This is now supported B: Final = 'x' + 'y' # This also now works ``` Currently we support integer arithmetic and bitwise operations, and string concatenation. 
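A few extra cases for illustration (editorial examples, not from the original change description — they simply follow the rules above, where only int/int operations, unary int operations, and string `+` are folded):

```
from typing import Final

M: Final = (1 << 4) | 3   # folded to 19
S: Final = 'a' * 3        # not folded: only + is handled for strings
F: Final = 2.0 + 1        # not folded: float arithmetic is left alone
```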
This can be useful with literal types, but my main goal was to improve constant folding in mypyc. In particular, this helps constant folding with native ints in cases like these: ``` FLAG1: Final = 1 << 4 FLAG2: Final = 1 << 5 def f() -> i64: return FLAG1 | FLAG2 # Can now be constant folded ``` We still have another constant folding pass in mypyc, since it does some things more aggressively (e.g. it constant folds some member expression references). Work on mypyc/mypyc#772. Also helps with mypyc/mypyc#862. --- mypy/constant_fold.py | 116 ++++++++++ mypy/semanal.py | 73 +++--- mypy/types.py | 5 +- mypyc/irbuild/constant_fold.py | 59 +---- mypyc/test-data/irbuild-basic.test | 2 +- mypyc/test-data/irbuild-constant-fold.test | 14 +- mypyc/test-data/irbuild-i64.test | 41 ++++ mypyc/test-data/irbuild-int.test | 48 ++++ mypyc/test-data/run-classes.test | 11 + test-data/unit/check-expressions.test | 2 +- test-data/unit/check-inference.test | 2 +- test-data/unit/check-modules.test | 16 +- test-data/unit/errorstream.test | 4 +- test-data/unit/semanal-basic.test | 50 +++-- test-data/unit/semanal-classes.test | 15 +- test-data/unit/semanal-expressions.test | 55 +++-- test-data/unit/semanal-modules.test | 60 +++-- test-data/unit/semanal-python310.test | 50 +++-- test-data/unit/semanal-statements.test | 248 +++++++++++++++++++-- test-data/unit/semanal-symtable.test | 12 +- 20 files changed, 646 insertions(+), 237 deletions(-) create mode 100644 mypy/constant_fold.py diff --git a/mypy/constant_fold.py b/mypy/constant_fold.py new file mode 100644 index 000000000000..a22c1b9ba9e5 --- /dev/null +++ b/mypy/constant_fold.py @@ -0,0 +1,116 @@ +"""Constant folding of expressions. + +For example, 3 + 5 can be constant folded into 8. +""" + +from __future__ import annotations + +from typing import Union +from typing_extensions import Final + +from mypy.nodes import Expression, FloatExpr, IntExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var + +# All possible result types of constant folding +ConstantValue = Union[int, bool, float, str] +CONST_TYPES: Final = (int, bool, float, str) + + +def constant_fold_expr(expr: Expression, cur_mod_id: str) -> ConstantValue | None: + """Return the constant value of an expression for supported operations. + + Among other things, support int arithmetic and string + concatenation. For example, the expression 3 + 5 has the constant + value 8. + + Also bind simple references to final constants defined in the + current module (cur_mod_id). Binding to references is best effort + -- we don't bind references to other modules. Mypyc trusts these + to be correct in compiled modules, so that it can replace a + constant expression (or a reference to one) with the statically + computed value. We don't want to infer constant values based on + stubs, in particular, as these might not match the implementation + (due to version skew, for example). + + Return None if unsuccessful. 
+ """ + if isinstance(expr, IntExpr): + return expr.value + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, FloatExpr): + return expr.value + elif isinstance(expr, NameExpr): + if expr.name == "True": + return True + elif expr.name == "False": + return False + node = expr.node + if ( + isinstance(node, Var) + and node.is_final + and node.fullname.rsplit(".", 1)[0] == cur_mod_id + ): + value = node.final_value + if isinstance(value, (CONST_TYPES)): + return value + elif isinstance(expr, OpExpr): + left = constant_fold_expr(expr.left, cur_mod_id) + right = constant_fold_expr(expr.right, cur_mod_id) + if isinstance(left, int) and isinstance(right, int): + return constant_fold_binary_int_op(expr.op, left, right) + elif isinstance(left, str) and isinstance(right, str): + return constant_fold_binary_str_op(expr.op, left, right) + elif isinstance(expr, UnaryExpr): + value = constant_fold_expr(expr.expr, cur_mod_id) + if isinstance(value, int): + return constant_fold_unary_int_op(expr.op, value) + return None + + +def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: + if op == "+": + return left + right + if op == "-": + return left - right + elif op == "*": + return left * right + elif op == "//": + if right != 0: + return left // right + elif op == "%": + if right != 0: + return left % right + elif op == "&": + return left & right + elif op == "|": + return left | right + elif op == "^": + return left ^ right + elif op == "<<": + if right >= 0: + return left << right + elif op == ">>": + if right >= 0: + return left >> right + elif op == "**": + if right >= 0: + ret = left**right + assert isinstance(ret, int) + return ret + return None + + +def constant_fold_unary_int_op(op: str, value: int) -> int | None: + if op == "-": + return -value + elif op == "~": + return ~value + elif op == "+": + return value + return None + + +def constant_fold_binary_str_op(op: str, left: str, right: str) -> str | None: + if op == "+": + return left + right + return None diff --git a/mypy/semanal.py b/mypy/semanal.py index 266dc891b697..fee66ae9b2cc 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -55,6 +55,7 @@ from typing_extensions import Final, TypeAlias as _TypeAlias from mypy import errorcodes as codes, message_registry +from mypy.constant_fold import constant_fold_expr from mypy.errorcodes import ErrorCode from mypy.errors import Errors, report_internal_error from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type @@ -91,7 +92,6 @@ AwaitExpr, Block, BreakStmt, - BytesExpr, CallExpr, CastExpr, ClassDef, @@ -108,7 +108,6 @@ Expression, ExpressionStmt, FakeExpression, - FloatExpr, ForStmt, FuncBase, FuncDef, @@ -121,7 +120,6 @@ ImportBase, ImportFrom, IndexExpr, - IntExpr, LambdaExpr, ListComprehension, ListExpr, @@ -250,7 +248,6 @@ FunctionLike, Instance, LiteralType, - LiteralValue, NoneType, Overloaded, Parameters, @@ -3138,7 +3135,8 @@ def store_final_status(self, s: AssignmentStmt) -> None: node = s.lvalues[0].node if isinstance(node, Var): node.is_final = True - node.final_value = self.unbox_literal(s.rvalue) + if s.type: + node.final_value = constant_fold_expr(s.rvalue, self.cur_mod_id) if self.is_class_scope() and ( isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs ): @@ -3198,13 +3196,6 @@ def flatten_lvalues(self, lvalues: list[Expression]) -> list[Expression]: res.append(lv) return res - def unbox_literal(self, e: Expression) -> int | float | bool | str | None: - if isinstance(e, (IntExpr, FloatExpr, StrExpr)): - return e.value 
- elif isinstance(e, NameExpr) and e.name in ("True", "False"): - return True if e.name == "True" else False - return None - def process_type_annotation(self, s: AssignmentStmt) -> None: """Analyze type annotation or infer simple literal type.""" if s.type: @@ -3259,39 +3250,33 @@ def is_annotated_protocol_member(self, s: AssignmentStmt) -> bool: def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Type | None: """Return builtins.int if rvalue is an int literal, etc. - If this is a 'Final' context, we return "Literal[...]" instead.""" - if self.options.semantic_analysis_only or self.function_stack: - # Skip this if we're only doing the semantic analysis pass. - # This is mostly to avoid breaking unit tests. - # Also skip inside a function; this is to avoid confusing + + If this is a 'Final' context, we return "Literal[...]" instead. + """ + if self.function_stack: + # Skip inside a function; this is to avoid confusing # the code that handles dead code due to isinstance() # inside type variables with value restrictions (like # AnyStr). return None - if isinstance(rvalue, FloatExpr): - return self.named_type_or_none("builtins.float") - - value: LiteralValue | None = None - type_name: str | None = None - if isinstance(rvalue, IntExpr): - value, type_name = rvalue.value, "builtins.int" - if isinstance(rvalue, StrExpr): - value, type_name = rvalue.value, "builtins.str" - if isinstance(rvalue, BytesExpr): - value, type_name = rvalue.value, "builtins.bytes" - - if type_name is not None: - assert value is not None - typ = self.named_type_or_none(type_name) - if typ and is_final: - return typ.copy_modified( - last_known_value=LiteralType( - value=value, fallback=typ, line=typ.line, column=typ.column - ) - ) - return typ - return None + value = constant_fold_expr(rvalue, self.cur_mod_id) + if value is None: + return None + + if isinstance(value, bool): + type_name = "builtins.bool" + elif isinstance(value, int): + type_name = "builtins.int" + elif isinstance(value, str): + type_name = "builtins.str" + elif isinstance(value, float): + type_name = "builtins.float" + + typ = self.named_type_or_none(type_name) + if typ and is_final: + return typ.copy_modified(last_known_value=LiteralType(value=value, fallback=typ)) + return typ def analyze_alias( self, name: str, rvalue: Expression, allow_placeholder: bool = False @@ -3827,6 +3812,14 @@ def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: var = lvalue.node var.type = typ var.is_ready = True + typ = get_proper_type(typ) + if ( + var.is_final + and isinstance(typ, Instance) + and typ.last_known_value + and (not self.type or not self.type.is_enum) + ): + var.final_value = typ.last_known_value.value # If node is not a variable, we'll catch it elsewhere. elif isinstance(lvalue, TupleExpr): typ = get_proper_type(typ) diff --git a/mypy/types.py b/mypy/types.py index b5a4f90d5ec3..0ba0985436ed 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -67,7 +67,10 @@ # Note: Although "Literal[None]" is a valid type, we internally always convert # such a type directly into "None". So, "None" is not a valid parameter of # LiteralType and is omitted from this list. -LiteralValue: _TypeAlias = Union[int, str, bool] +# +# Note: Float values are only used internally. They are not accepted within +# Literal[...]. 
+LiteralValue: _TypeAlias = Union[int, str, bool, float] # If we only import type_visitor in the middle of the file, mypy diff --git a/mypyc/irbuild/constant_fold.py b/mypyc/irbuild/constant_fold.py index 8d0a7fea5d90..4e9eb53b9222 100644 --- a/mypyc/irbuild/constant_fold.py +++ b/mypyc/irbuild/constant_fold.py @@ -1,6 +1,11 @@ """Constant folding of IR values. For example, 3 + 5 can be constant folded into 8. + +This is mostly like mypy.constant_fold, but we can bind some additional +NameExpr and MemberExpr references here, since we have more knowledge +about which definitions can be trusted -- we constant fold only references +to other compiled modules in the same compilation unit. """ from __future__ import annotations @@ -8,6 +13,11 @@ from typing import Union from typing_extensions import Final +from mypy.constant_fold import ( + constant_fold_binary_int_op, + constant_fold_binary_str_op, + constant_fold_unary_int_op, +) from mypy.nodes import Expression, IntExpr, MemberExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var from mypyc.irbuild.builder import IRBuilder @@ -51,52 +61,3 @@ def constant_fold_expr(builder: IRBuilder, expr: Expression) -> ConstantValue | if isinstance(value, int): return constant_fold_unary_int_op(expr.op, value) return None - - -def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: - if op == "+": - return left + right - if op == "-": - return left - right - elif op == "*": - return left * right - elif op == "//": - if right != 0: - return left // right - elif op == "%": - if right != 0: - return left % right - elif op == "&": - return left & right - elif op == "|": - return left | right - elif op == "^": - return left ^ right - elif op == "<<": - if right >= 0: - return left << right - elif op == ">>": - if right >= 0: - return left >> right - elif op == "**": - if right >= 0: - ret = left**right - assert isinstance(ret, int) - return ret - return None - - -def constant_fold_unary_int_op(op: str, value: int) -> int | None: - if op == "-": - return -value - elif op == "~": - return ~value - elif op == "+": - return value - return None - - -def constant_fold_binary_str_op(op: str, left: str, right: str) -> str | None: - if op == "+": - return left + right - return None diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index 4f5c9487bb1d..16b085ad4927 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -3273,7 +3273,7 @@ L2: [case testFinalStaticInt] from typing import Final -x: Final = 1 + 1 +x: Final = 1 + int() def f() -> int: return x - 1 diff --git a/mypyc/test-data/irbuild-constant-fold.test b/mypyc/test-data/irbuild-constant-fold.test index dd75c01443f1..7d9127887aa6 100644 --- a/mypyc/test-data/irbuild-constant-fold.test +++ b/mypyc/test-data/irbuild-constant-fold.test @@ -205,23 +205,13 @@ Y: Final = 2 + 4 def f() -> None: a = X + 1 - # TODO: Constant fold this as well a = Y + 1 [out] def f(): - a, r0 :: int - r1 :: bool - r2 :: int + a :: int L0: a = 12 - r0 = __main__.Y :: static - if is_error(r0) goto L1 else goto L2 -L1: - r1 = raise NameError('value for final name "Y" was not set') - unreachable -L2: - r2 = CPyTagged_Add(r0, 2) - a = r2 + a = 14 return 1 [case testIntConstantFoldingClassFinal] diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index ecedab2cd45d..9c942ea75219 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1663,3 +1663,44 @@ L1: x = 12 L2: return 1 + +[case 
testI64FinalConstants] +from typing_extensions import Final +from mypy_extensions import i64 + +A: Final = -1 +B: Final = -(1 + 3*2) +C: Final = 0 +D: Final = A - B +E: Final[i64] = 1 + 3 + +def f1() -> i64: + return A + +def f2() -> i64: + return A + B + +def f3() -> i64: + return C + +def f4() -> i64: + return D + +def f5() -> i64: + return E +[out] +def f1(): +L0: + return -1 +def f2(): +L0: + return -8 +def f3(): +L0: + return 0 +def f4(): +L0: + return 6 +def f5(): +L0: + return 4 diff --git a/mypyc/test-data/irbuild-int.test b/mypyc/test-data/irbuild-int.test index 8bf43cfa4923..e193c16ef979 100644 --- a/mypyc/test-data/irbuild-int.test +++ b/mypyc/test-data/irbuild-int.test @@ -155,3 +155,51 @@ def divby8(x): L0: r0 = CPyTagged_Rshift(x, 6) return r0 + +[case testFinalConstantFolding] +from typing_extensions import Final + +X: Final = -1 +Y: Final = -(1 + 3*2) +Z: Final = Y + 1 + +class C: + A: Final = 1 + B: Final = -1 + +def f1() -> int: + return X + +def f2() -> int: + return X + Y + +def f3() -> int: + return Z + +def f4() -> int: + return C.A + +def f5() -> int: + return C.B +[out] +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C +L0: + __mypyc_self__.A = 2 + __mypyc_self__.B = -2 + return 1 +def f1(): +L0: + return -2 +def f2(): +L0: + return -16 +def f3(): +L0: + return -12 +def f4(): +L0: + return 2 +def f5(): +L0: + return -2 diff --git a/mypyc/test-data/run-classes.test b/mypyc/test-data/run-classes.test index 177bae0cc895..2af519dc7aa8 100644 --- a/mypyc/test-data/run-classes.test +++ b/mypyc/test-data/run-classes.test @@ -2256,3 +2256,14 @@ class Derived(Base): pass assert Derived()() == 1 + +[case testClassWithFinalAttribute] +from typing_extensions import Final + +class C: + A: Final = -1 + a: Final = [A] + +def test_final_attribute() -> None: + assert C.A == -1 + assert C.a == [-1] diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index f7aa43d43f3e..6b42141b2b15 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1724,7 +1724,7 @@ reveal_type = 1 [case testRevealForward] def f() -> None: reveal_type(x) -x = 1 + 1 +x = 1 + int() [out] main:2: note: Revealed type is "builtins.int" diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 6767f1c7995c..45a833e5210c 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1979,7 +1979,7 @@ class A: [case testMultipassAndTopLevelVariable] y = x # E: Cannot determine type of "x" y() -x = 1+0 +x = 1+int() [out] [case testMultipassAndDecoratedMethod] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index b3267f66653d..26bd0f92ed9e 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1348,13 +1348,13 @@ import a import b def f() -> int: return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: reveal_type(a.y) return a.y -x = 1 + 1 +x = 1 + int() [out] tmp/b.py:3: note: Revealed type is "builtins.int" @@ -1365,12 +1365,12 @@ import b def f() -> int: reveal_type(b.x) return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: return a.y -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:3: note: Revealed type is "builtins.int" @@ -1385,7 +1385,7 @@ class C: self.x2 = b.b [file b.py] import a -b = 1 + 1 +b = 1 + int() [out] tmp/a.py:4: error: Cannot determine type of "x2" @@ -1398,7 +1398,7 @@ def f() -> None: a + '' [file b.py] import a -x = 1 + 1 +x 
= 1 + int() [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") @@ -1411,7 +1411,7 @@ def f() -> None: a + '' [file b.py] import a -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") @@ -1424,7 +1424,7 @@ def g() -> None: @b.deco def f(a: str) -> int: pass reveal_type(f) -x = 1 + 1 +x = 1 + int() [file b.py] from typing import Callable, TypeVar import a diff --git a/test-data/unit/errorstream.test b/test-data/unit/errorstream.test index 8a73748d27ff..46af433f8916 100644 --- a/test-data/unit/errorstream.test +++ b/test-data/unit/errorstream.test @@ -36,14 +36,14 @@ import b def f() -> int: reveal_type(b.x) return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: reveal_type(a.y) return a.y 1 / '' -x = 1 + 1 +x = 1 + int() [out] ==== Errors flushed ==== diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test index 4b60ab99f869..870c686807c3 100644 --- a/test-data/unit/semanal-basic.test +++ b/test-data/unit/semanal-basic.test @@ -8,8 +8,9 @@ x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( NameExpr(x [__main__.x]))) @@ -25,8 +26,9 @@ MypyFile:1( NameExpr(y* [__main__.y])) IntExpr(2)) AssignmentStmt:2( - NameExpr(z* [__main__.z]) - IntExpr(3)) + NameExpr(z [__main__.z]) + IntExpr(3) + builtins.int) ExpressionStmt:3( TupleExpr:3( NameExpr(x [__main__.x]) @@ -61,8 +63,9 @@ MypyFile:1( NameExpr(f [__main__.f]) Args())) AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) FuncDef:4( f Block:4( @@ -117,8 +120,9 @@ MypyFile:1( NameExpr(g [__main__.g]) Args())))) AssignmentStmt:4( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) FuncDef:5( g Block:5( @@ -134,8 +138,9 @@ def f(y): [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(2)) @@ -163,8 +168,9 @@ x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) FuncDef:2( f Block:2( @@ -498,17 +504,21 @@ MypyFile:1( ExpressionStmt:3( Ellipsis))) AssignmentStmt:4( - NameExpr(x* [__main__.x] = 1) - IntExpr(1)) + NameExpr(x [__main__.x] = 1) + IntExpr(1) + Literal[1]?) AssignmentStmt:5( - NameExpr(y* [__main__.y] = 1.0) - FloatExpr(1.0)) + NameExpr(y [__main__.y] = 1.0) + FloatExpr(1.0) + Literal[1.0]?) AssignmentStmt:6( - NameExpr(s* [__main__.s] = hi) - StrExpr(hi)) + NameExpr(s [__main__.s] = hi) + StrExpr(hi) + Literal['hi']?) AssignmentStmt:7( - NameExpr(t* [__main__.t] = True) - NameExpr(True [builtins.True])) + NameExpr(t [__main__.t] = True) + NameExpr(True [builtins.True]) + Literal[True]?) 
AssignmentStmt:8( NameExpr(n* [__main__.n] = None) CallExpr:8( diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test index 082a3fe69050..86f8b8656fb6 100644 --- a/test-data/unit/semanal-classes.test +++ b/test-data/unit/semanal-classes.test @@ -248,8 +248,9 @@ MypyFile:1( ClassDef:1( A AssignmentStmt:2( - NameExpr(x* [m]) - IntExpr(1)) + NameExpr(x [m]) + IntExpr(1) + builtins.int) AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [__main__.A.x])))) @@ -287,8 +288,9 @@ MypyFile:1( NameExpr(A [__main__.A])) Then( AssignmentStmt:3( - NameExpr(x* [m]) - IntExpr(1))) + NameExpr(x [m]) + IntExpr(1) + builtins.int)) Else( AssignmentStmt:5( NameExpr(x [__main__.A.x]) @@ -541,8 +543,9 @@ MypyFile:1( ClassDef:2( A AssignmentStmt:3( - NameExpr(X* [m]) - IntExpr(1)) + NameExpr(X [m]) + IntExpr(1) + builtins.int) FuncDef:4( f Args( diff --git a/test-data/unit/semanal-expressions.test b/test-data/unit/semanal-expressions.test index 98bf32708f1b..fa07e533a842 100644 --- a/test-data/unit/semanal-expressions.test +++ b/test-data/unit/semanal-expressions.test @@ -15,8 +15,9 @@ x.y [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( MemberExpr:2( NameExpr(x [__main__.x]) @@ -80,8 +81,9 @@ not x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( UnaryExpr:2( - @@ -187,8 +189,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( @@ -223,8 +226,9 @@ b = [x for x in a if x] [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) ListComprehension:2( @@ -240,8 +244,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( SetComprehension:2( GeneratorExpr:2( @@ -258,8 +263,9 @@ b = {x for x in a if x} [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) SetComprehension:2( @@ -275,8 +281,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( DictionaryComprehension:2( NameExpr(x [l]) @@ -293,8 +300,9 @@ b = {x: x + 1 for x in a if x} [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) DictionaryComprehension:2( @@ -313,8 +321,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) @@ -327,8 +336,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) @@ -345,8 +355,9 @@ lambda: x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:2( LambdaExpr:2( Block:2( diff --git 
a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test index 16b9a9b18250..8ffd7d2488dc 100644 --- a/test-data/unit/semanal-modules.test +++ b/test-data/unit/semanal-modules.test @@ -16,8 +16,9 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(y* [x.y]) - IntExpr(1))) + NameExpr(y [x.y]) + IntExpr(1) + builtins.int)) [case testImportedNameInType] import m @@ -51,8 +52,9 @@ MypyFile:1( MypyFile:1( tmp/m.py AssignmentStmt:1( - NameExpr(y* [m.y]) - IntExpr(1))) + NameExpr(y [m.y]) + IntExpr(1) + builtins.int)) [case testImportFromType] from m import c @@ -342,8 +344,9 @@ MypyFile:1( MypyFile:1( tmp/m/n.py AssignmentStmt:1( - NameExpr(x* [m.n.x]) - IntExpr(1))) + NameExpr(x [m.n.x]) + IntExpr(1) + builtins.int)) [case testImportFromSubmodule] from m._n import x @@ -448,8 +451,9 @@ MypyFile:1( MypyFile:1( tmp/m/n/k.py AssignmentStmt:1( - NameExpr(x* [m.n.k.x]) - IntExpr(1))) + NameExpr(x [m.n.k.x]) + IntExpr(1) + builtins.int)) [case testImportInSubmodule] import m._n @@ -609,8 +613,9 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(y* [x.y]) - IntExpr(1))) + NameExpr(y [x.y]) + IntExpr(1) + builtins.int)) [case testRelativeImport0] import m.x @@ -637,8 +642,9 @@ MypyFile:1( MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(1))) + NameExpr(y [m.z.y]) + IntExpr(1) + builtins.int)) [case testRelativeImport1] import m.t.b as b @@ -673,13 +679,15 @@ MypyFile:1( MypyFile:1( tmp/m/x.py AssignmentStmt:1( - NameExpr(y* [m.x.y]) - IntExpr(1))) + NameExpr(y [m.x.y]) + IntExpr(1) + builtins.int)) MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(3))) + NameExpr(y [m.z.y]) + IntExpr(3) + builtins.int)) [case testRelativeImport2] import m.t.b as b @@ -712,13 +720,15 @@ MypyFile:1( MypyFile:1( tmp/m/x.py AssignmentStmt:1( - NameExpr(y* [m.x.y]) - IntExpr(1))) + NameExpr(y [m.x.y]) + IntExpr(1) + builtins.int)) MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(3))) + NameExpr(y [m.z.y]) + IntExpr(3) + builtins.int)) [case testRelativeImport3] import m.t @@ -762,8 +772,9 @@ MypyFile:1( MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(zy* [m.z.zy]) - IntExpr(3))) + NameExpr(zy [m.z.zy]) + IntExpr(3) + builtins.int)) [case testRelativeImportFromSameModule] import m.x @@ -914,5 +925,6 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(a* [x.a]) - IntExpr(1))) + NameExpr(a [x.a]) + IntExpr(1) + builtins.int)) diff --git a/test-data/unit/semanal-python310.test b/test-data/unit/semanal-python310.test index a009636575dc..9418ac2912b2 100644 --- a/test-data/unit/semanal-python310.test +++ b/test-data/unit/semanal-python310.test @@ -8,8 +8,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -28,8 +29,9 @@ a [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -49,8 +51,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -78,8 +81,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -99,8 +103,9 @@ match x: [out] MypyFile:1( 
AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -121,11 +126,13 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( - NameExpr(a* [__main__.a]) - IntExpr(1)) + NameExpr(a [__main__.a]) + IntExpr(1) + builtins.int) MatchStmt:3( NameExpr(x [__main__.x]) Pattern( @@ -144,8 +151,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -164,8 +172,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -191,8 +200,9 @@ b = 1 MypyFile:1( Import:1(_a) AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:4( NameExpr(x [__main__.x]) Pattern( diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test index fdc5ca2bbbdd..27ff101c04d0 100644 --- a/test-data/unit/semanal-statements.test +++ b/test-data/unit/semanal-statements.test @@ -272,8 +272,9 @@ else: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) IfStmt:2( If( NameExpr(x [__main__.x])) @@ -326,8 +327,9 @@ MypyFile:1( NameExpr(y* [__main__.y])) IntExpr(1)) AssignmentStmt:2( - NameExpr(xx* [__main__.xx]) - IntExpr(1)) + NameExpr(xx [__main__.xx]) + IntExpr(1) + builtins.int) AssignmentStmt:3( MemberExpr:3( NameExpr(x [__main__.x]) @@ -408,8 +410,9 @@ MypyFile:1( [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y])) @@ -436,8 +439,9 @@ y, x = 1 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) @@ -450,8 +454,9 @@ y, (x, z) = 1 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) @@ -468,8 +473,9 @@ if x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) IfStmt:2( If( NameExpr(x [__main__.x])) @@ -510,8 +516,9 @@ del x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) DelStmt:2( NameExpr(x [__main__.x]))) @@ -961,16 +968,18 @@ MypyFile:1( Block:2( PassStmt:2())) AssignmentStmt:3( - NameExpr(x'* [__main__.x']) - IntExpr(0)) + NameExpr(x' [__main__.x']) + IntExpr(0) + builtins.int) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) Args( NameExpr(x' [__main__.x'])))) AssignmentStmt:5( - NameExpr(x* [__main__.x]) - StrExpr()) + NameExpr(x [__main__.x]) + StrExpr() + builtins.str) ExpressionStmt:6( CallExpr:6( NameExpr(f [__main__.f]) @@ -993,8 +1002,9 @@ MypyFile:1( Block:2( PassStmt:2())) AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) @@ -1046,15 +1056,17 @@ x 
= '' [out] MypyFile:1( AssignmentStmt:2( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:3( NameExpr(x [__main__.x])) ClassDef:4( A AssignmentStmt:5( - NameExpr(x* [m]) - IntExpr(1))) + NameExpr(x [m]) + IntExpr(1) + builtins.int)) AssignmentStmt:6( NameExpr(x [__main__.x]) StrExpr())) @@ -1114,3 +1126,191 @@ MypyFile:1( AssignmentStmt:5( NameExpr(y [__main__.y]) IntExpr(1))))) + +[case testConstantFold1] +from typing_extensions import Final +add: Final = 15 + 47 +add_mul: Final = (2 + 3) * 5 +sub: Final = 7 - 11 +bit_and: Final = 6 & 10 +bit_or: Final = 6 | 10 +bit_xor: Final = 6 ^ 10 +lshift: Final = 5 << 2 +rshift: Final = 13 >> 2 +lshift0: Final = 5 << 0 +rshift0: Final = 13 >> 0 +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [Final]) + AssignmentStmt:2( + NameExpr(add [__main__.add] = 62) + OpExpr:2( + + + IntExpr(15) + IntExpr(47)) + Literal[62]?) + AssignmentStmt:3( + NameExpr(add_mul [__main__.add_mul] = 25) + OpExpr:3( + * + OpExpr:3( + + + IntExpr(2) + IntExpr(3)) + IntExpr(5)) + Literal[25]?) + AssignmentStmt:4( + NameExpr(sub [__main__.sub] = -4) + OpExpr:4( + - + IntExpr(7) + IntExpr(11)) + Literal[-4]?) + AssignmentStmt:5( + NameExpr(bit_and [__main__.bit_and] = 2) + OpExpr:5( + & + IntExpr(6) + IntExpr(10)) + Literal[2]?) + AssignmentStmt:6( + NameExpr(bit_or [__main__.bit_or] = 14) + OpExpr:6( + | + IntExpr(6) + IntExpr(10)) + Literal[14]?) + AssignmentStmt:7( + NameExpr(bit_xor [__main__.bit_xor] = 12) + OpExpr:7( + ^ + IntExpr(6) + IntExpr(10)) + Literal[12]?) + AssignmentStmt:8( + NameExpr(lshift [__main__.lshift] = 20) + OpExpr:8( + << + IntExpr(5) + IntExpr(2)) + Literal[20]?) + AssignmentStmt:9( + NameExpr(rshift [__main__.rshift] = 3) + OpExpr:9( + >> + IntExpr(13) + IntExpr(2)) + Literal[3]?) + AssignmentStmt:10( + NameExpr(lshift0 [__main__.lshift0] = 5) + OpExpr:10( + << + IntExpr(5) + IntExpr(0)) + Literal[5]?) + AssignmentStmt:11( + NameExpr(rshift0 [__main__.rshift0] = 13) + OpExpr:11( + >> + IntExpr(13) + IntExpr(0)) + Literal[13]?)) + +[case testConstantFold2] +from typing_extensions import Final +neg1: Final = -5 +neg2: Final = --1 +neg3: Final = -0 +pos: Final = +5 +inverted1: Final = ~0 +inverted2: Final = ~5 +inverted3: Final = ~3 +p0: Final = 3**0 +p1: Final = 3**5 +p2: Final = (-5)**3 +p3: Final = 0**0 +s: Final = 'x' + 'y' +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [Final]) + AssignmentStmt:2( + NameExpr(neg1 [__main__.neg1] = -5) + UnaryExpr:2( + - + IntExpr(5)) + Literal[-5]?) + AssignmentStmt:3( + NameExpr(neg2 [__main__.neg2] = 1) + UnaryExpr:3( + - + UnaryExpr:3( + - + IntExpr(1))) + Literal[1]?) + AssignmentStmt:4( + NameExpr(neg3 [__main__.neg3] = 0) + UnaryExpr:4( + - + IntExpr(0)) + Literal[0]?) + AssignmentStmt:5( + NameExpr(pos [__main__.pos] = 5) + UnaryExpr:5( + + + IntExpr(5)) + Literal[5]?) + AssignmentStmt:6( + NameExpr(inverted1 [__main__.inverted1] = -1) + UnaryExpr:6( + ~ + IntExpr(0)) + Literal[-1]?) + AssignmentStmt:7( + NameExpr(inverted2 [__main__.inverted2] = -6) + UnaryExpr:7( + ~ + IntExpr(5)) + Literal[-6]?) + AssignmentStmt:8( + NameExpr(inverted3 [__main__.inverted3] = -4) + UnaryExpr:8( + ~ + IntExpr(3)) + Literal[-4]?) + AssignmentStmt:9( + NameExpr(p0 [__main__.p0] = 1) + OpExpr:9( + ** + IntExpr(3) + IntExpr(0)) + Literal[1]?) + AssignmentStmt:10( + NameExpr(p1 [__main__.p1] = 243) + OpExpr:10( + ** + IntExpr(3) + IntExpr(5)) + Literal[243]?) 
+ AssignmentStmt:11( + NameExpr(p2 [__main__.p2] = -125) + OpExpr:11( + ** + UnaryExpr:11( + - + IntExpr(5)) + IntExpr(3)) + Literal[-125]?) + AssignmentStmt:12( + NameExpr(p3 [__main__.p3] = 1) + OpExpr:12( + ** + IntExpr(0) + IntExpr(0)) + Literal[1]?) + AssignmentStmt:13( + NameExpr(s [__main__.s] = xy) + OpExpr:13( + + + StrExpr(x) + StrExpr(y)) + Literal['xy']?)) diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test index bdf4f52ae5fc..c886080557b0 100644 --- a/test-data/unit/semanal-symtable.test +++ b/test-data/unit/semanal-symtable.test @@ -9,7 +9,7 @@ x = 1 [out] __main__: SymbolTable( - x : Gdef/Var (__main__.x)) + x : Gdef/Var (__main__.x) : builtins.int) [case testFuncDef] def f(): pass @@ -35,7 +35,7 @@ __main__: m : Gdef/MypyFile (m)) m: SymbolTable( - x : Gdef/Var (m.x)) + x : Gdef/Var (m.x) : builtins.int) [case testImportFromModule] from m import x @@ -49,7 +49,7 @@ __main__: m: SymbolTable( x : Gdef/TypeInfo (m.x) - y : Gdef/Var (m.y)) + y : Gdef/Var (m.y) : builtins.int) [case testImportAs] from m import x as xx @@ -63,7 +63,7 @@ __main__: m: SymbolTable( x : Gdef/TypeInfo (m.x) - y : Gdef/Var (m.y)) + y : Gdef/Var (m.y) : builtins.int) [case testFailingImports] from sys import non_existing1 # type: ignore @@ -80,7 +80,7 @@ __main__: non_existing4 : Gdef/Var (__main__.non_existing4) : Any) sys: SymbolTable( - platform : Gdef/Var (sys.platform) + platform : Gdef/Var (sys.platform) : builtins.str version_info : Gdef/Var (sys.version_info)) [case testDecorator] @@ -95,6 +95,6 @@ def g() -> None: [out] __main__: SymbolTable( - Callable : Gdef/Var (typing.Callable) + Callable : Gdef/Var (typing.Callable) : builtins.int dec : Gdef/FuncDef (__main__.dec) : def (f: def ()) -> def () g : Gdef/Decorator (__main__.g) : def ()) From 96ac8b3e71c743b02ea4e3c84da0248659e40f82 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Fri, 16 Dec 2022 15:43:58 -0800 Subject: [PATCH 118/292] [partially defined] implement support for try statements (#14114) This adds support for try/except/finally/else check. The implementation ended up pretty complicated because it had to handle jumps different for finally. It took me a few iterations to get to this solution and that's the cleanest one I could come up with. Closes #13928. 
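A condensed illustration of the new behaviour, adapted from the test cases added in this patch (they run with `--enable-error-code possibly-undefined`); `x` is only treated as safely defined after the `try` when every handler forces an early exit:

```python
def f1() -> int:
    try:
        x = 1
    except:
        pass      # falling through leaves x possibly unbound
    return x      # E: Name "x" may be undefined

def f4() -> int:
    try:
        x = 1
    except:
        return 0  # every failing path leaves the function early...
    return x      # ...so x is always defined here: no error
```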
--- mypy/partially_defined.py | 113 +++++++++++- test-data/unit/check-possibly-undefined.test | 184 +++++++++++++++++++ 2 files changed, 295 insertions(+), 2 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 5f5253515b61..9b3e105f64ef 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -31,6 +31,7 @@ RefExpr, ReturnStmt, StarExpr, + TryStmt, TupleExpr, WhileStmt, WithStmt, @@ -66,6 +67,13 @@ def __init__( self.must_be_defined = set(must_be_defined) self.skipped = skipped + def copy(self) -> BranchState: + return BranchState( + must_be_defined=set(self.must_be_defined), + may_be_defined=set(self.may_be_defined), + skipped=self.skipped, + ) + class BranchStatement: def __init__(self, initial_state: BranchState) -> None: @@ -77,6 +85,11 @@ def __init__(self, initial_state: BranchState) -> None: ) ] + def copy(self) -> BranchStatement: + result = BranchStatement(self.initial_state) + result.branches = [b.copy() for b in self.branches] + return result + def next_branch(self) -> None: self.branches.append( BranchState( @@ -90,6 +103,11 @@ def record_definition(self, name: str) -> None: self.branches[-1].must_be_defined.add(name) self.branches[-1].may_be_defined.discard(name) + def delete_var(self, name: str) -> None: + assert len(self.branches) > 0 + self.branches[-1].must_be_defined.discard(name) + self.branches[-1].may_be_defined.discard(name) + def record_nested_branch(self, state: BranchState) -> None: assert len(self.branches) > 0 current_branch = self.branches[-1] @@ -151,6 +169,11 @@ def __init__(self, stmts: list[BranchStatement]) -> None: self.branch_stmts: list[BranchStatement] = stmts self.undefined_refs: dict[str, set[NameExpr]] = {} + def copy(self) -> Scope: + result = Scope([s.copy() for s in self.branch_stmts]) + result.undefined_refs = self.undefined_refs.copy() + return result + def record_undefined_ref(self, o: NameExpr) -> None: if o.name not in self.undefined_refs: self.undefined_refs[o.name] = set() @@ -166,6 +189,15 @@ class DefinedVariableTracker: def __init__(self) -> None: # There's always at least one scope. Within each scope, there's at least one "global" BranchingStatement. self.scopes: list[Scope] = [Scope([BranchStatement(BranchState())])] + # disable_branch_skip is used to disable skipping a branch due to a return/raise/etc. This is useful + # in things like try/except/finally statements. + self.disable_branch_skip = False + + def copy(self) -> DefinedVariableTracker: + result = DefinedVariableTracker() + result.scopes = [s.copy() for s in self.scopes] + result.disable_branch_skip = self.disable_branch_skip + return result def _scope(self) -> Scope: assert len(self.scopes) > 0 @@ -195,7 +227,7 @@ def end_branch_statement(self) -> None: def skip_branch(self) -> None: # Only skip branch if we're outside of "root" branch statement. - if len(self._scope().branch_stmts) > 1: + if len(self._scope().branch_stmts) > 1 and not self.disable_branch_skip: self._scope().branch_stmts[-1].skip_branch() def record_definition(self, name: str) -> None: @@ -203,6 +235,11 @@ def record_definition(self, name: str) -> None: assert len(self.scopes[-1].branch_stmts) > 0 self._scope().branch_stmts[-1].record_definition(name) + def delete_var(self, name: str) -> None: + assert len(self.scopes) > 0 + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].delete_var(name) + def record_undefined_ref(self, o: NameExpr) -> None: """Records an undefined reference. 
These can later be retrieved via `pop_undefined_ref`.""" assert len(self.scopes) > 0 @@ -268,6 +305,7 @@ def __init__( self.type_map = type_map self.options = options self.loops: list[Loop] = [] + self.try_depth = 0 self.tracker = DefinedVariableTracker() for name in implicit_module_attrs: self.tracker.record_definition(name) @@ -432,6 +470,75 @@ def visit_expression_stmt(self, o: ExpressionStmt) -> None: self.tracker.skip_branch() super().visit_expression_stmt(o) + def visit_try_stmt(self, o: TryStmt) -> None: + """ + Note that finding undefined vars in `finally` requires different handling from + the rest of the code. In particular, we want to disallow skipping branches due to jump + statements in except/else clauses for finally but not for other cases. Imagine a case like: + def f() -> int: + try: + x = 1 + except: + # This jump statement needs to be handled differently depending on whether or + # not we're trying to process `finally` or not. + return 0 + finally: + # `x` may be undefined here. + pass + # `x` is always defined here. + return x + """ + self.try_depth += 1 + if o.finally_body is not None: + # In order to find undefined vars in `finally`, we need to + # process try/except with branch skipping disabled. However, for the rest of the code + # after finally, we need to process try/except with branch skipping enabled. + # Therefore, we need to process try/finally twice. + # Because processing is not idempotent, we should make a copy of the tracker. + old_tracker = self.tracker.copy() + self.tracker.disable_branch_skip = True + self.process_try_stmt(o) + self.tracker = old_tracker + self.process_try_stmt(o) + self.try_depth -= 1 + + def process_try_stmt(self, o: TryStmt) -> None: + """ + Processes try statement decomposing it into the following: + if ...: + body + else_body + elif ...: + except 1 + elif ...: + except 2 + else: + except n + finally + """ + self.tracker.start_branch_statement() + o.body.accept(self) + if o.else_body is not None: + o.else_body.accept(self) + if len(o.handlers) > 0: + assert len(o.handlers) == len(o.vars) == len(o.types) + for i in range(len(o.handlers)): + self.tracker.next_branch() + exc_type = o.types[i] + if exc_type is not None: + exc_type.accept(self) + var = o.vars[i] + if var is not None: + self.process_definition(var.name) + var.accept(self) + o.handlers[i].accept(self) + if var is not None: + self.tracker.delete_var(var.name) + self.tracker.end_branch_statement() + + if o.finally_body is not None: + o.finally_body.accept(self) + def visit_while_stmt(self, o: WhileStmt) -> None: o.expr.accept(self) self.tracker.start_branch_statement() @@ -478,7 +585,9 @@ def visit_name_expr(self, o: NameExpr) -> None: self.tracker.record_definition(o.name) elif self.tracker.is_defined_in_different_branch(o.name): # A variable is defined in one branch but used in a different branch. - if self.loops: + if self.loops or self.try_depth > 0: + # If we're in a loop or in a try, we can't be sure that this variable + # is undefined. Report it as "may be undefined". 
self.variable_may_be_undefined(o.name, o) else: self.var_used_before_def(o.name, o) diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test index d99943572a38..ee7020252de8 100644 --- a/test-data/unit/check-possibly-undefined.test +++ b/test-data/unit/check-possibly-undefined.test @@ -525,6 +525,190 @@ def f3() -> None: y = x z = x # E: Name "x" may be undefined +[case testTryBasic] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f1() -> int: + try: + x = 1 + except: + pass + return x # E: Name "x" may be undefined + +def f2() -> int: + try: + pass + except: + x = 1 + return x # E: Name "x" may be undefined + +def f3() -> int: + try: + x = 1 + except: + y = x # E: Name "x" may be undefined + return x # E: Name "x" may be undefined + +def f4() -> int: + try: + x = 1 + except: + return 0 + return x + +def f5() -> int: + try: + x = 1 + except: + raise + return x + +def f6() -> None: + try: + pass + except BaseException as exc: + x = exc # No error. + exc = BaseException() + # This case is covered by the other check, not by possibly undefined check. + y = exc # E: Trying to read deleted variable "exc" + +def f7() -> int: + try: + if int(): + x = 1 + assert False + except: + pass + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testTryMultiExcept] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + try: + x = 1 + except BaseException: + x = 2 + except: + x = 3 + return x + +def f2() -> int: + try: + x = 1 + except BaseException: + pass + except: + x = 3 + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testTryFinally] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f1() -> int: + try: + x = 1 + finally: + x = 2 + return x + +def f2() -> int: + try: + pass + except: + pass + finally: + x = 2 + return x + +def f3() -> int: + try: + x = 1 + except: + pass + finally: + y = x # E: Name "x" may be undefined + return x + +def f4() -> int: + try: + x = 0 + except BaseException: + raise + finally: + y = x # E: Name "x" may be undefined + return y + +def f5() -> int: + try: + if int(): + x = 1 + else: + return 0 + finally: + pass + return x # No error. + +def f6() -> int: + try: + if int(): + x = 1 + else: + return 0 + finally: + a = x # E: Name "x" may be undefined + return a +[builtins fixtures/exception.pyi] + +[case testTryElse] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + try: + return 0 + except BaseException: + x = 1 + else: + x = 2 + finally: + y = x + return y + +def f2() -> int: + try: + pass + except: + x = 1 + else: + x = 2 + return x + +def f3() -> int: + try: + pass + except: + x = 1 + else: + pass + return x # E: Name "x" may be undefined + +def f4() -> int: + try: + x = 1 + except: + x = 2 + else: + pass + return x + +def f5() -> int: + try: + pass + except: + x = 1 + else: + return 1 + return x +[builtins fixtures/exception.pyi] + [case testNoReturn] # flags: --enable-error-code possibly-undefined From b1388e0b479868623f9bcb18491f8b041c68e512 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Mon, 19 Dec 2022 06:48:30 -0800 Subject: [PATCH 119/292] [used-before-def] fix false positive inside loop (#14307) A similar case was addressed in #14176 but I missed the part where it doesn't need to be defined in a different branch. This makes the fix more complete. 
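The loop case being fixed, as captured by the new test below: on the first iteration `x` really is unbound, but on every later iteration it was assigned at the end of the previous pass, so the softer "may be undefined" diagnostic is used instead of a hard used-before-def error:

```python
def f4() -> None:
    while int():
        y = x       # now reported as: Name "x" may be undefined
        x: int = 1  # defined by the time the next iteration reads it
```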
--- mypy/partially_defined.py | 5 ++++- test-data/unit/check-possibly-undefined.test | 7 ++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 9b3e105f64ef..fd322bbf0ad6 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -322,7 +322,10 @@ def process_definition(self, name: str) -> None: # Was this name previously used? If yes, it's a used-before-definition error. refs = self.tracker.pop_undefined_ref(name) for ref in refs: - self.var_used_before_def(name, ref) + if self.loops: + self.variable_may_be_undefined(name, ref) + else: + self.var_used_before_def(name, ref) self.tracker.record_definition(name) def visit_global_decl(self, o: GlobalDecl) -> None: diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test index ee7020252de8..c632225c540b 100644 --- a/test-data/unit/check-possibly-undefined.test +++ b/test-data/unit/check-possibly-undefined.test @@ -382,7 +382,7 @@ def f2() -> None: x = 2 w = x # No error. -[case testDefinedDifferentBranchPossiblyUndefined] +[case testPossiblyUndefinedLoop] # flags: --enable-error-code possibly-undefined --enable-error-code used-before-def def f0() -> None: @@ -423,6 +423,11 @@ def f3() -> None: else: y = x # E: Name "x" may be undefined +def f4() -> None: + while int(): + y = x # E: Name "x" may be undefined + x: int = 1 + [case testAssert] # flags: --enable-error-code possibly-undefined def f1() -> int: From d391f108c195533a5800afe73f5b3257babc83a3 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Mon, 19 Dec 2022 06:49:33 -0800 Subject: [PATCH 120/292] [undefined vars] skip visiting unreachable else clauses (#14308) In particular, ran into an issue with an `if TYPE_CHECKING` case, so I added a test for that. 
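The `if TYPE_CHECKING` shape that triggered the false positive, as exercised by the new tests below; since the `else` body is unreachable during type checking, it is now skipped entirely instead of being visited and reporting names like `x` as undefined:

```python
import typing

def f2() -> None:
    if typing.TYPE_CHECKING:
        x = 1
    else:
        y = x  # unreachable at type-check time, so no error is reported
```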
--- mypy/partially_defined.py | 5 +-- test-data/unit/check-possibly-undefined.test | 34 ++++++++++++-------- 2 files changed, 24 insertions(+), 15 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index fd322bbf0ad6..644a47248615 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -366,9 +366,10 @@ def visit_if_stmt(self, o: IfStmt) -> None: b.accept(self) self.tracker.next_branch() if o.else_body: - if o.else_body.is_unreachable: + if not o.else_body.is_unreachable: + o.else_body.accept(self) + else: self.tracker.skip_branch() - o.else_body.accept(self) self.tracker.end_branch_statement() def visit_match_stmt(self, o: MatchStmt) -> None: diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test index c632225c540b..aa535a1ce081 100644 --- a/test-data/unit/check-possibly-undefined.test +++ b/test-data/unit/check-possibly-undefined.test @@ -771,22 +771,30 @@ def f() -> None: [builtins fixtures/tuple.pyi] [case testUnreachable] -# flags: --enable-error-code possibly-undefined +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def import typing -if typing.TYPE_CHECKING: - x = 1 -elif int(): - y = 1 -else: - y = 2 -a = x +def f0() -> None: + if typing.TYPE_CHECKING: + x = 1 + elif int(): + y = 1 + else: + y = 2 + a = x -if not typing.TYPE_CHECKING: - pass -else: - z = 1 -a = z +def f1() -> None: + if not typing.TYPE_CHECKING: + pass + else: + z = 1 + a = z + +def f2() -> None: + if typing.TYPE_CHECKING: + x = 1 + else: + y = x [typing fixtures/typing-medium.pyi] [case testUsedBeforeDef] From 97d9ed59a0e33c86efd37fa90a4cbdb53c9b62a0 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 19 Dec 2022 17:06:21 +0100 Subject: [PATCH 121/292] Add basic support for `typing_extensions.TypeVar` (#14313) This PR only adds the existing `TypeVar` support for the `typing_extensions` variant. I.e. it does not include support for `default` or `infer_variance`. Fixes #14312 --- mypy/semanal.py | 2 +- test-data/unit/check-generics.test | 33 +++++++++++++++++++ test-data/unit/lib-stub/typing_extensions.pyi | 7 ++-- 3 files changed, 39 insertions(+), 3 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index fee66ae9b2cc..916009702830 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3847,7 +3847,7 @@ def process_typevar_declaration(self, s: AssignmentStmt) -> bool: Return True if this looks like a type variable declaration (but maybe with errors), otherwise return False. 
""" - call = self.get_typevarlike_declaration(s, ("typing.TypeVar",)) + call = self.get_typevarlike_declaration(s, ("typing.TypeVar", "typing_extensions.TypeVar")) if not call: return False diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index dd7e31528a4f..27441ce908fe 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -2663,3 +2663,36 @@ def foo(x: A) -> A: # N: (Hint: Use "B" in function signature to bind "B" inside a function) return y.x return bar()[0] + + +-- TypeVar imported from typing_extensions +-- --------------------------------------- + +[case testTypeVarTypingExtensionsSimpleGeneric] +from typing import Generic +from typing_extensions import TypeVar + +T = TypeVar("T") + +class A(Generic[T]): + def __init__(self, value: T) -> None: + self.value = value + +a: A = A(8) +b: A[str] = A("") + +reveal_type(A(1.23)) # N: Revealed type is "__main__.A[builtins.float]" + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTypingExtensionsSimpleBound] +from typing_extensions import TypeVar + +T= TypeVar("T") + +def func(var: T) -> T: + return var + +reveal_type(func(1)) # N: Revealed type is "builtins.int" + +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi index e92f7e913502..cbf692fc7111 100644 --- a/test-data/unit/lib-stub/typing_extensions.pyi +++ b/test-data/unit/lib-stub/typing_extensions.pyi @@ -1,10 +1,11 @@ -from typing import TypeVar, Any, Mapping, Iterator, NoReturn as NoReturn, Dict, Type +import typing +from typing import Any, Mapping, Iterator, NoReturn as NoReturn, Dict, Type from typing import TYPE_CHECKING as TYPE_CHECKING from typing import NewType as NewType, overload as overload import sys -_T = TypeVar('_T') +_T = typing.TypeVar('_T') class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: @@ -25,6 +26,8 @@ Literal: _SpecialForm = ... Annotated: _SpecialForm = ... +TypeVar: _SpecialForm + ParamSpec: _SpecialForm Concatenate: _SpecialForm From d5dc1fb4cc6ff969f125f5fb8bc51c2a29659bc1 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 19 Dec 2022 23:27:50 +0100 Subject: [PATCH 122/292] Add `--debug-serialize` option (#14155) Currently, `mypy_primer` sets `--cache-dir=/dev/null` which disables cache generation. This can result in errors being missed which would normally come up during `tree.serialize()`. Removing `--cache-dir=/dev/null` isn't practical. This PR adds a new debug / test option `--debug-serialize` which runs `tree.serialize()` even if cache generation is disabled to help detect serialize errors earlier. 
**Refs** * #14137 * https://github.com/hauntsaninja/mypy_primer/pull/54#pullrequestreview-1187145602 cc: @hauntsaninja --- mypy/build.py | 6 ++++++ mypy/main.py | 3 +++ mypy/options.py | 3 +++ 3 files changed, 12 insertions(+) diff --git a/mypy/build.py b/mypy/build.py index 7da3e71ce25e..d2bcd572969e 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2479,6 +2479,12 @@ def write_cache(self) -> None: or self.options.cache_dir == os.devnull or self.options.fine_grained_incremental ): + if self.options.debug_serialize: + try: + self.tree.serialize() + except Exception: + print(f"Error serializing {self.id}", file=self.manager.stdout) + raise # Propagate to display traceback return is_errors = self.transitive_error if is_errors: diff --git a/mypy/main.py b/mypy/main.py index 8f60d13074a0..47dea2ae9797 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -1126,6 +1126,9 @@ def add_invertible_flag( parser.add_argument( "--cache-map", nargs="+", dest="special-opts:cache_map", help=argparse.SUPPRESS ) + # --debug-serialize will run tree.serialize() even if cache generation is disabled. + # Useful for mypy_primer to detect serialize errors earlier. + parser.add_argument("--debug-serialize", action="store_true", help=argparse.SUPPRESS) # This one is deprecated, but we will keep it for few releases. parser.add_argument( "--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS diff --git a/mypy/options.py b/mypy/options.py index ffb6b201e70b..92c96a92c531 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -249,6 +249,9 @@ def __init__(self) -> None: # Read cache files in fine-grained incremental mode (cache must include dependencies) self.use_fine_grained_cache = False + # Run tree.serialize() even if cache generation is disabled + self.debug_serialize = False + # Tune certain behaviors when being used as a front-end to mypyc. Set per-module # in modules being compiled. Not in the config file or command line. self.mypyc = False From 2e3144c6a27cbe15d22c5240100b96b98cba6dbd Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 20 Dec 2022 00:25:00 +0100 Subject: [PATCH 123/292] Enable `--debug-serialize` for mypy_primer (#14318) Enable the `--debug-serialize` option to help catch issues during serialization which would normally be skipped by mypy_primer. Followup to #14155 --- .github/workflows/mypy_primer.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml index d26372aa6635..9eef1c1c7466 100644 --- a/.github/workflows/mypy_primer.yml +++ b/.github/workflows/mypy_primer.yml @@ -62,6 +62,7 @@ jobs: --new $GITHUB_SHA --old base_commit \ --num-shards 5 --shard-index ${{ matrix.shard-index }} \ --debug \ + --additional-flags="--debug-serialize" \ --output concise \ | tee diff_${{ matrix.shard-index }}.txt ) || [ $? 
-eq 1 ] From fcd705d3149906d1fa892c4ae9e5458275007b24 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 20 Dec 2022 07:09:05 +0000 Subject: [PATCH 124/292] Update `stubinfo.py` for recent typeshed changes (#14265) Removals from `stubinfo.py`: - `atomicwrites` is archived and deprecated at runtime; stubs were removed from typeshed in https://github.com/python/typeshed/pull/8925 - `attrs` has had inline types for a very long time now - `chardet` recently cut a release with inline types; typeshed's stubs were marked obsolete in https://github.com/python/typeshed/pull/9318 - `cryptography` has had inline types for a very long time now; the only reason why it's still in typeshed is because other typeshed packages need `types-cryptography` as a dependency, and our testing infrastructure therefore can't currently cope with it being removed from typeshed. - `emoji` recently cut a release bundling stubs with the runtime package; typeshed's stubs were marked obsolete in https://github.com/python/typeshed/pull/9051 - `termcolor` recently cut a release with inline types; typeshed's stubs were marked obsolete in https://github.com/python/typeshed/pull/8746 - `prettytable` recently cut a release with inline types; typeshed's stubs were marked obsolete in https://github.com/python/typeshed/pull/9023 Additions: - Stubs for `Xlib` were added in https://github.com/python/typeshed/pull/9279 - Stubs for `consolemenu` were added in https://github.com/python/typeshed/pull/8820 - Stubs for `dockerfile_parse` were added in https://github.com/python/typeshed/pull/9305 - Stubs for `flask_migrate` were added in https://github.com/python/typeshed/pull/8967 - Stubs for `paho.mqtt` were added in https://github.com/python/typeshed/pull/8853 - Stubs for `pycocotools` were added in https://github.com/python/typeshed/pull/9086 - Stubs for many `pywin32` modules were added in https://github.com/python/typeshed/pull/8825, and multiple follow-up PRs - Stubs for `pyscreeze` were added in https://github.com/python/typeshed/pull/8823 --- mypy/stubinfo.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index b8dea5d0046b..15bd96d9f4b4 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -20,14 +20,11 @@ def stub_package_name(prefix: str) -> str: # Package name can have one or two components ('a' or 'a.b'). legacy_bundled_packages = { "aiofiles": "types-aiofiles", - "atomicwrites": "types-atomicwrites", - "attr": "types-attrs", "backports": "types-backports", "backports_abc": "types-backports_abc", "bleach": "types-bleach", "boto": "types-boto", "cachetools": "types-cachetools", - "chardet": "types-chardet", "click_spinner": "types-click-spinner", "contextvars": "types-contextvars", "croniter": "types-croniter", @@ -38,7 +35,6 @@ def stub_package_name(prefix: str) -> str: "decorator": "types-decorator", "deprecated": "types-Deprecated", "docutils": "types-docutils", - "emoji": "types-emoji", "first": "types-first", "geoip2": "types-geoip2", "gflags": "types-python-gflags", @@ -64,7 +60,6 @@ def stub_package_name(prefix: str) -> str: "six": "types-six", "slugify": "types-python-slugify", "tabulate": "types-tabulate", - "termcolor": "types-termcolor", "toml": "types-toml", "typed_ast": "types-typed-ast", "tzlocal": "types-tzlocal", @@ -83,10 +78,14 @@ def stub_package_name(prefix: str) -> str: # Note that these packages are omitted for now: # sqlalchemy: It's unclear which stub package to suggest. There's also # a mypy plugin available. 
+# pika: typeshed's stubs are on PyPI as types-pika-ts. +# types-pika already exists on PyPI, and is more complete in many ways, +# but is a non-typeshed stubs package. non_bundled_packages = { "MySQLdb": "types-mysqlclient", "PIL": "types-Pillow", "PyInstaller": "types-pyinstaller", + "Xlib": "types-python-xlib", "annoy": "types-annoy", "appdirs": "types-appdirs", "aws_xray_sdk": "types-aws-xray-sdk", @@ -100,9 +99,11 @@ def stub_package_name(prefix: str) -> str: "chevron": "types-chevron", "colorama": "types-colorama", "commonmark": "types-commonmark", - "cryptography": "types-cryptography", + "consolemenu": "types-console-menu", + "crontab": "types-python-crontab", "d3dshot": "types-D3DShot", "dj_database_url": "types-dj-database-url", + "dockerfile_parse": "types-dockerfile-parse", "docopt": "types-docopt", "editdistance": "types-editdistance", "entrypoints": "types-entrypoints", @@ -115,6 +116,7 @@ def stub_package_name(prefix: str) -> str: "flake8_simplify": "types-flake8-simplify", "flake8_typing_imports": "types-flake8-typing-imports", "flask_cors": "types-Flask-Cors", + "flask_migrate": "types-Flask-Migrate", "flask_sqlalchemy": "types-Flask-SQLAlchemy", "fpdf": "types-fpdf2", "gdb": "types-gdb", @@ -134,22 +136,28 @@ def stub_package_name(prefix: str) -> str: "oauthlib": "types-oauthlib", "openpyxl": "types-openpyxl", "opentracing": "types-opentracing", + "paho.mqtt": "types-paho-mqtt", "parsimonious": "types-parsimonious", "passlib": "types-passlib", "passpy": "types-passpy", + "peewee": "types-peewee", "pep8ext_naming": "types-pep8-naming", "playsound": "types-playsound", - "prettytable": "types-prettytable", "psutil": "types-psutil", "psycopg2": "types-psycopg2", "pyaudio": "types-pyaudio", "pyautogui": "types-PyAutoGUI", + "pycocotools": "types-pycocotools", "pyflakes": "types-pyflakes", "pygments": "types-Pygments", "pyi_splash": "types-pyinstaller", "pynput": "types-pynput", + "pythoncom": "types-pywin32", + "pythonwin": "types-pywin32", + "pyscreeze": "types-PyScreeze", "pysftp": "types-pysftp", "pytest_lazyfixture": "types-pytest-lazy-fixture", + "pywintypes": "types-pywin32", "regex": "types-regex", "send2trash": "types-Send2Trash", "slumber": "types-slumber", @@ -163,6 +171,12 @@ def stub_package_name(prefix: str) -> str: "urllib3": "types-urllib3", "vobject": "types-vobject", "whatthepatch": "types-whatthepatch", + "win32": "types-pywin32", + "win32api": "types-pywin32", + "win32con": "types-pywin32", + "win32com": "types-pywin32", + "win32comext": "types-pywin32", + "win32gui": "types-pywin32", "xmltodict": "types-xmltodict", "xxhash": "types-xxhash", "zxcvbn": "types-zxcvbn", From c4144640a98fb7ddc1eaacc26f659042d1a27e75 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 20 Dec 2022 09:52:43 +0000 Subject: [PATCH 125/292] Optimize implementation of TypedDict types for **kwds (#14316) The implementation copied lots of callable types even when not using the new feature, which was expensive. Now we only generate a copy if a callable actually uses TypedDict types for **kwds. This made self check 7-8% faster (when compiled with -O0). The original implementation was in https://github.com/python/mypy/pull/13471. 
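For context, a sketch of the kind of signature that sets `unpack_kwargs` (assuming the `Unpack[...]`-based form added in the referenced PR; the `Movie`/`make_movie` names are illustrative only). Every other callable now takes the new fast path and is returned unchanged instead of being copied:

```python
from typing_extensions import TypedDict, Unpack

class Movie(TypedDict):
    name: str
    year: int

def make_movie(**kwargs: Unpack[Movie]) -> None:
    # Only callables whose **kwargs is an unpacked TypedDict need the
    # with_unpacked_kwargs() transformation; plain callables do not.
    ...
```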
--- mypy/types.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index 0ba0985436ed..ab2caa96e535 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1757,7 +1757,7 @@ def copy_modified( from_concatenate: Bogus[bool] = _dummy, unpack_kwargs: Bogus[bool] = _dummy, ) -> CT: - return type(self)( + modified = CallableType( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, arg_names=arg_names if arg_names is not _dummy else self.arg_names, @@ -1782,6 +1782,9 @@ def copy_modified( ), unpack_kwargs=unpack_kwargs if unpack_kwargs is not _dummy else self.unpack_kwargs, ) + # Optimization: Only NewTypes are supported as subtypes since + # the class is effectively final, so we can use a cast safely. + return cast(CT, modified) def var_arg(self) -> FormalArgument | None: """The formal argument for *args.""" @@ -1976,7 +1979,7 @@ def expand_param_spec( def with_unpacked_kwargs(self) -> NormalizedCallableType: if not self.unpack_kwargs: - return NormalizedCallableType(self.copy_modified()) + return cast(NormalizedCallableType, self) last_type = get_proper_type(self.arg_types[-1]) assert isinstance(last_type, TypedDictType) extra_kinds = [ @@ -2126,7 +2129,9 @@ def get_name(self) -> str | None: return self._items[0].name def with_unpacked_kwargs(self) -> Overloaded: - return Overloaded([i.with_unpacked_kwargs() for i in self.items]) + if any(i.unpack_kwargs for i in self.items): + return Overloaded([i.with_unpacked_kwargs() for i in self.items]) + return self def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_overloaded(self) From c588852451ec63f9a26fefe5eb82976aa03d611c Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 20 Dec 2022 15:35:38 +0000 Subject: [PATCH 126/292] Speed up freshening type variables (#14323) Only perform type variable freshening if it's needed, i.e. there is a nested generic callable, since it's fairly expensive. Make the check for generic callables fast by creating a specialized type query visitor base class for queries with bool results. The visitor tries hard to avoid memory allocation in typical cases, since allocation is slow. This addresses at least some of the performance regression in #14095. This improved self-check performance by about 3% when compiled with mypyc (-O2). The new visitor class can potentially help with other type queries as well. I'll explore it in follow-up PRs. 
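A sketch of how the new base class is meant to be used, mirroring the `HasGenericCallable` query in the diff below (the `contains_generic_callable` wrapper name is illustrative): subclass `BoolTypeQuery`, keep one shared instance to avoid re-allocating the visitor, and call `reset()` before every query because the visitor tracks visited type aliases between calls:

```python
from typing_extensions import Final
from mypy.types import ANY_STRATEGY, BoolTypeQuery, CallableType, Type

class HasGenericCallable(BoolTypeQuery):
    def __init__(self) -> None:
        super().__init__(ANY_STRATEGY)  # True as soon as one component matches

    def visit_callable_type(self, t: CallableType) -> bool:
        return t.is_generic() or super().visit_callable_type(t)

# Shared singleton: cheap to reuse, but stateful, hence the reset() below.
has_generic_callable: Final = HasGenericCallable()

def contains_generic_callable(t: Type) -> bool:
    has_generic_callable.reset()  # clear the per-query alias-tracking state
    return t.accept(has_generic_callable)
```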
--- mypy/expandtype.py | 26 +++++++- mypy/type_visitor.py | 156 +++++++++++++++++++++++++++++++++++++++++++ mypy/types.py | 5 +- 3 files changed, 183 insertions(+), 4 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 1458fb74ce94..ca562ede264f 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -1,11 +1,14 @@ from __future__ import annotations from typing import Iterable, Mapping, Sequence, TypeVar, cast, overload +from typing_extensions import Final from mypy.nodes import ARG_POS, ARG_STAR, Var from mypy.type_visitor import TypeTranslator from mypy.types import ( + ANY_STRATEGY, AnyType, + BoolTypeQuery, CallableType, DeletedType, ErasedType, @@ -138,13 +141,30 @@ def freshen_function_type_vars(callee: F) -> F: return cast(F, fresh_overload) +class HasGenericCallable(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_callable_type(self, t: CallableType) -> bool: + return t.is_generic() or super().visit_callable_type(t) + + +# Share a singleton since this is performance sensitive +has_generic_callable: Final = HasGenericCallable() + + T = TypeVar("T", bound=Type) def freshen_all_functions_type_vars(t: T) -> T: - result = t.accept(FreshenCallableVisitor()) - assert isinstance(result, type(t)) - return result + result: Type + has_generic_callable.reset() + if not t.accept(has_generic_callable): + return t # Fast path to avoid expensive freshening + else: + result = t.accept(FreshenCallableVisitor()) + assert isinstance(result, type(t)) + return result class FreshenCallableVisitor(TypeTranslator): diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index 0f5ac05e68ac..823e74e7e283 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -15,6 +15,7 @@ from abc import abstractmethod from typing import Any, Callable, Generic, Iterable, Sequence, TypeVar, cast +from typing_extensions import Final from mypy_extensions import mypyc_attr, trait @@ -417,3 +418,158 @@ def visit_type_alias_type(self, t: TypeAliasType) -> T: def query_types(self, types: Iterable[Type]) -> T: """Perform a query for a list of types using the strategy to combine the results.""" return self.strategy([t.accept(self) for t in types]) + + +# Return True if at least one type component returns True +ANY_STRATEGY: Final = 0 +# Return True if no type component returns False +ALL_STRATEGY: Final = 1 + + +class BoolTypeQuery(SyntheticTypeVisitor[bool]): + """Visitor for performing recursive queries of types with a bool result. + + Use TypeQuery if you need non-bool results. + + 'strategy' is used to combine results for a series of types. It must + be ANY_STRATEGY or ALL_STRATEGY. + + Note: This visitor keeps an internal state (tracks type aliases to avoid + recursion), so it should *never* be re-used for querying different types + unless you call reset() first. + """ + + def __init__(self, strategy: int) -> None: + self.strategy = strategy + if strategy == ANY_STRATEGY: + self.default = False + else: + assert strategy == ALL_STRATEGY + self.default = True + # Keep track of the type aliases already visited. This is needed to avoid + # infinite recursion on types like A = Union[int, List[A]]. An empty set is + # represented as None as a micro-optimization. + self.seen_aliases: set[TypeAliasType] | None = None + # By default, we eagerly expand type aliases, and query also types in the + # alias target. In most cases this is a desired behavior, but we may want + # to skip targets in some cases (e.g. when collecting type variables). 
+ self.skip_alias_target = False + + def reset(self) -> None: + """Clear mutable state (but preserve strategy). + + This *must* be called if you want to reuse the visitor. + """ + self.seen_aliases = None + + def visit_unbound_type(self, t: UnboundType) -> bool: + return self.query_types(t.args) + + def visit_type_list(self, t: TypeList) -> bool: + return self.query_types(t.items) + + def visit_callable_argument(self, t: CallableArgument) -> bool: + return t.typ.accept(self) + + def visit_any(self, t: AnyType) -> bool: + return self.default + + def visit_uninhabited_type(self, t: UninhabitedType) -> bool: + return self.default + + def visit_none_type(self, t: NoneType) -> bool: + return self.default + + def visit_erased_type(self, t: ErasedType) -> bool: + return self.default + + def visit_deleted_type(self, t: DeletedType) -> bool: + return self.default + + def visit_type_var(self, t: TypeVarType) -> bool: + return self.query_types([t.upper_bound] + t.values) + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return self.default + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return self.default + + def visit_unpack_type(self, t: UnpackType) -> bool: + return self.query_types([t.type]) + + def visit_parameters(self, t: Parameters) -> bool: + return self.query_types(t.arg_types) + + def visit_partial_type(self, t: PartialType) -> bool: + return self.default + + def visit_instance(self, t: Instance) -> bool: + return self.query_types(t.args) + + def visit_callable_type(self, t: CallableType) -> bool: + # FIX generics + # Avoid allocating any objects here as an optimization. + args = self.query_types(t.arg_types) + ret = t.ret_type.accept(self) + if self.strategy == ANY_STRATEGY: + return args or ret + else: + return args and ret + + def visit_tuple_type(self, t: TupleType) -> bool: + return self.query_types(t.items) + + def visit_typeddict_type(self, t: TypedDictType) -> bool: + return self.query_types(list(t.items.values())) + + def visit_raw_expression_type(self, t: RawExpressionType) -> bool: + return self.default + + def visit_literal_type(self, t: LiteralType) -> bool: + return self.default + + def visit_star_type(self, t: StarType) -> bool: + return t.type.accept(self) + + def visit_union_type(self, t: UnionType) -> bool: + return self.query_types(t.items) + + def visit_overloaded(self, t: Overloaded) -> bool: + return self.query_types(t.items) # type: ignore[arg-type] + + def visit_type_type(self, t: TypeType) -> bool: + return t.item.accept(self) + + def visit_ellipsis_type(self, t: EllipsisType) -> bool: + return self.default + + def visit_placeholder_type(self, t: PlaceholderType) -> bool: + return self.query_types(t.args) + + def visit_type_alias_type(self, t: TypeAliasType) -> bool: + # Skip type aliases already visited types to avoid infinite recursion. + # TODO: Ideally we should fire subvisitors here (or use caching) if we care + # about duplicates. + if self.seen_aliases is None: + self.seen_aliases = set() + elif t in self.seen_aliases: + return self.default + self.seen_aliases.add(t) + if self.skip_alias_target: + return self.query_types(t.args) + return get_proper_type(t).accept(self) + + def query_types(self, types: list[Type] | tuple[Type, ...]) -> bool: + """Perform a query for a sequence of types using the strategy to combine the results.""" + # Special-case for lists and tuples to allow mypyc to produce better code. 
+ if isinstance(types, list): + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) + else: + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) diff --git a/mypy/types.py b/mypy/types.py index ab2caa96e535..5c1fe2a0e960 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2879,7 +2879,10 @@ def get_proper_types(it: Iterable[Type | None]) -> list[ProperType] | list[Prope # to make it easier to gradually get modules working with mypyc. # Import them here, after the types are defined. # This is intended as a re-export also. -from mypy.type_visitor import ( # noqa: F811 +from mypy.type_visitor import ( # noqa: F811,F401 + ALL_STRATEGY as ALL_STRATEGY, + ANY_STRATEGY as ANY_STRATEGY, + BoolTypeQuery as BoolTypeQuery, SyntheticTypeVisitor as SyntheticTypeVisitor, TypeQuery as TypeQuery, TypeTranslator as TypeTranslator, From d35e571ef303ec63f4c8c3437669c858d85329eb Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 20 Dec 2022 15:45:18 +0000 Subject: [PATCH 127/292] Optimize type parameter checks in subtype checking (#14324) Avoid the use of a nested function, which are a bit slow when compiled with mypyc. Also avoid a callable value and instead call a function directly, which allows using faster native calls. Based on a quick experiment, this speeds up self check by about 3%. This addresses some of the slowdown introduced in #13303. --- mypy/subtypes.py | 60 ++++++++++++++++++++++-------------------------- 1 file changed, 28 insertions(+), 32 deletions(-) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index e4667c45fbc5..bdeeed6c6d67 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -330,34 +330,28 @@ def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool def check_type_parameter( - lefta: Type, righta: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext + left: Type, right: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext ) -> bool: - def check(left: Type, right: Type) -> bool: - return ( - is_proper_subtype(left, right, subtype_context=subtype_context) - if proper_subtype - else is_subtype(left, right, subtype_context=subtype_context) - ) - if variance == COVARIANT: - return check(lefta, righta) + if proper_subtype: + return is_proper_subtype(left, right, subtype_context=subtype_context) + else: + return is_subtype(left, right, subtype_context=subtype_context) elif variance == CONTRAVARIANT: - return check(righta, lefta) + if proper_subtype: + return is_proper_subtype(right, left, subtype_context=subtype_context) + else: + return is_subtype(right, left, subtype_context=subtype_context) else: if proper_subtype: # We pass ignore_promotions=False because it is a default for subtype checks. # The actual value will be taken from the subtype_context, and it is whatever # the original caller passed. 
return is_same_type( - lefta, righta, ignore_promotions=False, subtype_context=subtype_context + left, right, ignore_promotions=False, subtype_context=subtype_context ) - return is_equivalent(lefta, righta, subtype_context=subtype_context) - - -def ignore_type_parameter( - lefta: Type, righta: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext -) -> bool: - return True + else: + return is_equivalent(left, right, subtype_context=subtype_context) class SubtypeVisitor(TypeVisitor[bool]): @@ -366,9 +360,6 @@ def __init__(self, right: Type, subtype_context: SubtypeContext, proper_subtype: self.orig_right = right self.proper_subtype = proper_subtype self.subtype_context = subtype_context - self.check_type_parameter = ( - ignore_type_parameter if subtype_context.ignore_type_params else check_type_parameter - ) self.options = subtype_context.options self._subtype_kind = SubtypeVisitor.build_subtype_kind(subtype_context, proper_subtype) @@ -572,17 +563,22 @@ def check_mixed( ) else: type_params = zip(t.args, right.args, right.type.defn.type_vars) - for lefta, righta, tvar in type_params: - if isinstance(tvar, TypeVarType): - if not self.check_type_parameter( - lefta, righta, tvar.variance, self.proper_subtype, self.subtype_context - ): - nominal = False - else: - if not self.check_type_parameter( - lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context - ): - nominal = False + if not self.subtype_context.ignore_type_params: + for lefta, righta, tvar in type_params: + if isinstance(tvar, TypeVarType): + if not check_type_parameter( + lefta, + righta, + tvar.variance, + self.proper_subtype, + self.subtype_context, + ): + nominal = False + else: + if not check_type_parameter( + lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context + ): + nominal = False if nominal: TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) return nominal From b5fc748a1ccb65a9d4dc0eef897c71ee8b15ddc9 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 20 Dec 2022 15:45:42 +0000 Subject: [PATCH 128/292] Optimize subtype checking by avoiding a nested function (#14325) Mypyc isn't good at compiling nested functions, and this one was in one of the hottest code paths in all of mypy. The nested function wasn't even used that often, but mypyc would still construct a closure object every time. This adds some code duplication, but it's well worth it. Amazingly, this speeds up self-check by about 10%, if my measurements are to be trusted! This addresses some of the slowdown introduced in #13303. #14324 addresses another related slowdown. --- mypy/subtypes.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index bdeeed6c6d67..b8d59977f986 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -289,18 +289,20 @@ def _is_subtype( # ErasedType as we do for non-proper subtyping. return True - def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool: - if proper_subtype: - return is_proper_subtype(left, right, subtype_context=subtype_context) - return is_subtype(left, right, subtype_context=subtype_context) - if isinstance(right, UnionType) and not isinstance(left, UnionType): # Normally, when 'left' is not itself a union, the only way # 'left' can be a subtype of the union 'right' is if it is a # subtype of one of the items making up the union. 
- is_subtype_of_item = any( - check_item(orig_left, item, subtype_context) for item in right.items - ) + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) # Recombine rhs literal types, to make an enum type a subtype # of a union of all enum items as literal types. Only do it if # the previous check didn't succeed, since recombining can be @@ -312,9 +314,16 @@ def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool and (left.type.is_enum or left.type.fullname == "builtins.bool") ): right = UnionType(mypy.typeops.try_contracting_literals_in_union(right.items)) - is_subtype_of_item = any( - check_item(orig_left, item, subtype_context) for item in right.items - ) + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) # However, if 'left' is a type variable T, T might also have # an upper bound which is itself a union. This case will be # handled below by the SubtypeVisitor. We have to check both From 7ed4f5ef3cff221505a54851a23fd80fa0a412c6 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 20 Dec 2022 17:12:57 +0000 Subject: [PATCH 129/292] Speed up recursive type check (#14326) Use a faster type query visitor and reuse visitor across calls. This should speed up type checking slightly. My measurements show a ~0.5% improvement, but it may be below the noise floor. --- mypy/types.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index 5c1fe2a0e960..bff83ba52df6 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3321,17 +3321,22 @@ def has_type_vars(typ: Type) -> bool: return typ.accept(HasTypeVars()) -class HasRecursiveType(TypeQuery[bool]): +class HasRecursiveType(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_type_alias_type(self, t: TypeAliasType) -> bool: return t.is_recursive or self.query_types(t.args) +# Use singleton since this is hot (note: call reset() before using) +_has_recursive_type: Final = HasRecursiveType() + + def has_recursive_types(typ: Type) -> bool: """Check if a type contains any recursive aliases (recursively).""" - return typ.accept(HasRecursiveType()) + _has_recursive_type.reset() + return typ.accept(_has_recursive_type) def flatten_nested_unions( From 41574e0cc521987376dc8c149095b3483aff80ff Mon Sep 17 00:00:00 2001 From: Ilya Konstantinov Date: Tue, 20 Dec 2022 16:04:04 -0500 Subject: [PATCH 130/292] Allow 'in' to narrow TypedDict unions (#13838) `in` could narrow unions of TypeDicts, e.g. ```python class A(TypedDict) foo: int @final class B(TypedDict): bar: int union: Union[A, B] = ... 
value: int if 'foo' in union: # Cannot be a B as it is final and has no "foo" field, so must be an A value = union['foo'] else: # Cannot be an A as those went to the if branch value = union['bar'] ``` --- mypy/checker.py | 91 +++++++-- mypy/types.py | 4 + test-data/unit/check-typeddict.test | 185 +++++++++++++++++++ test-data/unit/fixtures/typing-typeddict.pyi | 1 + 4 files changed, 262 insertions(+), 19 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 1c8956ae6722..d56d3e2716f1 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -5097,6 +5097,45 @@ def conditional_callable_type_map( return None, {} + def conditional_types_for_iterable( + self, item_type: Type, iterable_type: Type + ) -> tuple[Type | None, Type | None]: + """ + Narrows the type of `iterable_type` based on the type of `item_type`. + For now, we only support narrowing unions of TypedDicts based on left operand being literal string(s). + """ + if_types: list[Type] = [] + else_types: list[Type] = [] + + iterable_type = get_proper_type(iterable_type) + if isinstance(iterable_type, UnionType): + possible_iterable_types = get_proper_types(iterable_type.relevant_items()) + else: + possible_iterable_types = [iterable_type] + + item_str_literals = try_getting_str_literals_from_type(item_type) + + for possible_iterable_type in possible_iterable_types: + if item_str_literals and isinstance(possible_iterable_type, TypedDictType): + for key in item_str_literals: + if key in possible_iterable_type.required_keys: + if_types.append(possible_iterable_type) + elif ( + key in possible_iterable_type.items or not possible_iterable_type.is_final + ): + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + else: + else_types.append(possible_iterable_type) + else: + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + + return ( + UnionType.make_union(if_types) if if_types else None, + UnionType.make_union(else_types) if else_types else None, + ) + def _is_truthy_type(self, t: ProperType) -> bool: return ( ( @@ -5412,28 +5451,42 @@ def has_no_custom_eq_checks(t: Type) -> bool: elif operator in {"in", "not in"}: assert len(expr_indices) == 2 left_index, right_index = expr_indices - if left_index not in narrowable_operand_index_to_hash: - continue - item_type = operand_types[left_index] - collection_type = operand_types[right_index] + iterable_type = operand_types[right_index] - # We only try and narrow away 'None' for now - if not is_optional(item_type): - continue + if_map, else_map = {}, {} + + if left_index in narrowable_operand_index_to_hash: + # We only try and narrow away 'None' for now + if is_optional(item_type): + collection_item_type = get_proper_type( + builtin_item_type(iterable_type) + ) + if ( + collection_item_type is not None + and not is_optional(collection_item_type) + and not ( + isinstance(collection_item_type, Instance) + and collection_item_type.type.fullname == "builtins.object" + ) + and is_overlapping_erased_types(item_type, collection_item_type) + ): + if_map[operands[left_index]] = remove_optional(item_type) + + if right_index in narrowable_operand_index_to_hash: + if_type, else_type = self.conditional_types_for_iterable( + item_type, iterable_type + ) + expr = operands[right_index] + if if_type is None: + if_map = None + else: + if_map[expr] = if_type + if else_type is None: + else_map = None + else: + else_map[expr] = else_type - collection_item_type = get_proper_type(builtin_item_type(collection_type)) - if collection_item_type is None 
or is_optional(collection_item_type): - continue - if ( - isinstance(collection_item_type, Instance) - and collection_item_type.type.fullname == "builtins.object" - ): - continue - if is_overlapping_erased_types(item_type, collection_item_type): - if_map, else_map = {operands[left_index]: remove_optional(item_type)}, {} - else: - continue else: if_map = {} else_map = {} diff --git a/mypy/types.py b/mypy/types.py index bff83ba52df6..383e5621060e 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2334,6 +2334,10 @@ def deserialize(cls, data: JsonDict) -> TypedDictType: Instance.deserialize(data["fallback"]), ) + @property + def is_final(self) -> bool: + return self.fallback.type.is_final + def is_anonymous(self) -> bool: return self.fallback.type.fullname in TPDICT_FB_NAMES diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index fbef6157087c..d277fa441b1e 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -2025,6 +2025,191 @@ v = {bad2: 2} # E: Extra key "bad" for TypedDict "Value" [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] +[case testOperatorContainsNarrowsTypedDicts_unionWithList] +from __future__ import annotations +from typing import assert_type, TypedDict, Union +from typing_extensions import final + +@final +class D(TypedDict): + foo: int + + +d_or_list: D | list[str] + +if 'foo' in d_or_list: + assert_type(d_or_list, Union[D, list[str]]) +elif 'bar' in d_or_list: + assert_type(d_or_list, list[str]) +else: + assert_type(d_or_list, list[str]) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_total] +from __future__ import annotations +from typing import assert_type, Literal, TypedDict, TypeVar, Union +from typing_extensions import final + +@final +class D1(TypedDict): + foo: int + + +@final +class D2(TypedDict): + bar: int + + +d: D1 | D2 + +if 'foo' in d: + assert_type(d, D1) +else: + assert_type(d, D2) + +foo_or_bar: Literal['foo', 'bar'] +if foo_or_bar in d: + assert_type(d, Union[D1, D2]) +else: + assert_type(d, Union[D1, D2]) + +foo_or_invalid: Literal['foo', 'invalid'] +if foo_or_invalid in d: + assert_type(d, D1) + # won't narrow 'foo_or_invalid' + assert_type(foo_or_invalid, Literal['foo', 'invalid']) +else: + assert_type(d, Union[D1, D2]) + # won't narrow 'foo_or_invalid' + assert_type(foo_or_invalid, Literal['foo', 'invalid']) + +TD = TypeVar('TD', D1, D2) + +def f(arg: TD) -> None: + value: int + if 'foo' in arg: + assert_type(arg['foo'], int) + else: + assert_type(arg['bar'], int) + + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_final] +# flags: --warn-unreachable +from __future__ import annotations +from typing import assert_type, TypedDict, Union +from typing_extensions import final + +@final +class DFinal(TypedDict): + foo: int + + +class DNotFinal(TypedDict): + bar: int + + +d_not_final: DNotFinal + +if 'bar' in d_not_final: + assert_type(d_not_final, DNotFinal) +else: + spam = 'ham' # E: Statement is unreachable + +if 'spam' in d_not_final: + assert_type(d_not_final, DNotFinal) +else: + assert_type(d_not_final, DNotFinal) + +d_final: DFinal + +if 'spam' in d_final: + spam = 'ham' # E: Statement is unreachable +else: + assert_type(d_final, DFinal) + +d_union: DFinal | DNotFinal + +if 'foo' in d_union: + assert_type(d_union, Union[DFinal, DNotFinal]) +else: + assert_type(d_union, DNotFinal) + +[builtins fixtures/dict.pyi] +[typing 
fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_partialThroughTotalFalse] +from __future__ import annotations +from typing import assert_type, Literal, TypedDict, Union +from typing_extensions import final + +@final +class DTotal(TypedDict): + required_key: int + + +@final +class DNotTotal(TypedDict, total=False): + optional_key: int + + +d: DTotal | DNotTotal + +if 'required_key' in d: + assert_type(d, DTotal) +else: + assert_type(d, DNotTotal) + +if 'optional_key' in d: + assert_type(d, DNotTotal) +else: + assert_type(d, Union[DTotal, DNotTotal]) + +key: Literal['optional_key', 'required_key'] +if key in d: + assert_type(d, Union[DTotal, DNotTotal]) +else: + assert_type(d, Union[DTotal, DNotTotal]) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_partialThroughNotRequired] +from __future__ import annotations +from typing import assert_type, Required, NotRequired, TypedDict, Union +from typing_extensions import final + +@final +class D1(TypedDict): + required_key: Required[int] + optional_key: NotRequired[int] + + +@final +class D2(TypedDict): + abc: int + xyz: int + + +d: D1 | D2 + +if 'required_key' in d: + assert_type(d, D1) +else: + assert_type(d, D2) + +if 'optional_key' in d: + assert_type(d, D1) +else: + assert_type(d, Union[D1, D2]) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + [case testCannotSubclassFinalTypedDict] from typing import TypedDict from typing_extensions import final diff --git a/test-data/unit/fixtures/typing-typeddict.pyi b/test-data/unit/fixtures/typing-typeddict.pyi index e398dff3fc6b..92ae402b9ea5 100644 --- a/test-data/unit/fixtures/typing-typeddict.pyi +++ b/test-data/unit/fixtures/typing-typeddict.pyi @@ -9,6 +9,7 @@ from abc import ABCMeta cast = 0 +assert_type = 0 overload = 0 Any = 0 Union = 0 From f7ed65b0ff3b1bb82ff5e305129bfc251dfeec1d Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 20 Dec 2022 23:15:29 +0000 Subject: [PATCH 131/292] Revert "[mypyc] Use tabs instead of spaces in emitted C code (#14016)" (#14152) This reverts commit dbcbb3f5c3ef791c98088da0bd1dfa6cbf51f301. The indentation in generated code is now inconsistent if using 4-space tabs. We should either use tabs or spaces consistently everywhere, since we can't expect everybody to have the same tab width. The broken indentation can be seen by compiling a hello world program and opening it in an editor configured to use 4-space tabs. Since there is a lot of code in mypyc that assumes a 4-space indentation, fixing it all is probably only feasible by normalizing the indentation during the emit phase. However, the normalization step might actually slow down compilation slightly, whereas the intent of the original change to improve efficiency, so this change may ultimately be impractical. In the future we should make it possible to normalize tabs without any significant cost, but I'm not sure if that's possible right now. 
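A hypothetical sketch of the emit-phase normalization alluded to above (not part of this change; the function name and exact policy are illustrative): rewrite each emitted line's leading whitespace into one consistent unit, so hand-written 4-space fragments and emitter-produced indentation agree, at the cost of a little extra work per emitted line:

```python
def normalize_indent(line: str, tab_width: int = 4) -> str:
    # Expand leading tabs to spaces (or the reverse, if tabs were kept) so
    # all emitted C uses a single indentation style. Only the leading
    # whitespace is touched, so string literals in the code are unaffected.
    stripped = line.lstrip("\t ")
    prefix = line[: len(line) - len(stripped)]
    width = sum(tab_width if ch == "\t" else 1 for ch in prefix)
    return " " * width + stripped
```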
--- mypyc/codegen/emit.py | 6 +++--- mypyc/test/test_emit.py | 2 +- mypyc/test/test_emitfunc.py | 6 +++--- mypyc/test/test_emitwrapper.py | 3 +-- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 368c5dd366ea..6e0c89dd0ecf 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -176,10 +176,10 @@ def __init__( # Low-level operations def indent(self) -> None: - self._indent += 1 + self._indent += 4 def dedent(self) -> None: - self._indent -= 1 + self._indent -= 4 assert self._indent >= 0 def label(self, label: BasicBlock) -> str: @@ -194,7 +194,7 @@ def attr(self, name: str) -> str: def emit_line(self, line: str = "") -> None: if line.startswith("}"): self.dedent() - self.fragments.append(self._indent * "\t" + line + "\n") + self.fragments.append(self._indent * " " + line + "\n") if line.endswith("{"): self.indent() diff --git a/mypyc/test/test_emit.py b/mypyc/test/test_emit.py index 1b624a7a6cdb..7351cd7fb13e 100644 --- a/mypyc/test/test_emit.py +++ b/mypyc/test/test_emit.py @@ -28,4 +28,4 @@ def test_emit_line(self) -> None: emitter.emit_line("a {") emitter.emit_line("f();") emitter.emit_line("}") - assert emitter.fragments == ["line;\n", "a {\n", "\tf();\n", "}\n"] + assert emitter.fragments == ["line;\n", "a {\n", " f();\n", "}\n"] diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index 3b44f7e444c8..d7dcf3be532b 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -833,7 +833,7 @@ def assert_emit( op.accept(visitor) frags = declarations.fragments + emitter.fragments - actual_lines = [line.strip(" \t") for line in frags] + actual_lines = [line.strip(" ") for line in frags] assert all(line.endswith("\n") for line in actual_lines) actual_lines = [line.rstrip("\n") for line in actual_lines] if not expected.strip(): @@ -900,7 +900,7 @@ def test_simple(self) -> None: " return cpy_r_arg;\n", "}\n", ], - [line.replace("\t", 4 * " ") for line in result], + result, msg="Generated code invalid", ) @@ -927,6 +927,6 @@ def test_register(self) -> None: " CPy_Unreachable();\n", "}\n", ], - [line.replace("\t", 4 * " ") for line in result], + result, msg="Generated code invalid", ) diff --git a/mypyc/test/test_emitwrapper.py b/mypyc/test/test_emitwrapper.py index ec5adb4c6622..c4465656444c 100644 --- a/mypyc/test/test_emitwrapper.py +++ b/mypyc/test/test_emitwrapper.py @@ -56,6 +56,5 @@ def test_check_int(self) -> None: ) def assert_lines(self, expected: list[str], actual: list[str]) -> None: - actual = [line.rstrip("\n").replace(4 * " ", "\t") for line in actual] - expected = [line.replace(4 * " ", "\t") for line in expected] + actual = [line.rstrip("\n") for line in actual] assert_string_arrays_equal(expected, actual, "Invalid output") From cb1d1a0baba37f35268cb605b7345726f257f960 Mon Sep 17 00:00:00 2001 From: Lefteris Karapetsas Date: Wed, 21 Dec 2022 00:16:51 +0100 Subject: [PATCH 132/292] Fix RST markup in type_narrowing.rst (#14253) Switch :py:func:type to py:class:type in type_narrowing.rst. 
The former does not render properly in the docs This is a tiny follow up from https://github.com/python/mypy/pull/14246#issuecomment-1336467793 --- docs/source/type_narrowing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/type_narrowing.rst b/docs/source/type_narrowing.rst index 806835ed33a5..72a816679140 100644 --- a/docs/source/type_narrowing.rst +++ b/docs/source/type_narrowing.rst @@ -16,7 +16,7 @@ The simplest way to narrow a type is to use one of the supported expressions: - :py:func:`isinstance` like in ``isinstance(obj, float)`` will narrow ``obj`` to have ``float`` type - :py:func:`issubclass` like in ``issubclass(cls, MyClass)`` will narrow ``cls`` to be ``Type[MyClass]`` -- :py:func:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type +- :py:class:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type - :py:func:`callable` like in ``callable(obj)`` will narrow object to callable type Type narrowing is contextual. For example, based on the condition, mypy will narrow an expression only within an ``if`` branch: From 2d5108b343ad11f5e2087e9e8ed55202cbd3237c Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 21 Dec 2022 10:27:09 +0000 Subject: [PATCH 133/292] Change various type queries into faster bool type queries (#14330) I measured a 1% performance improvement in self check. --- mypy/checker.py | 7 ++++--- mypy/checkexpr.py | 20 ++++++++++---------- mypy/semanal_shared.py | 6 +++--- mypy/typeanal.py | 10 ++++++---- mypy/types.py | 4 ++-- 5 files changed, 25 insertions(+), 22 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index d56d3e2716f1..119ce6ae6338 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -177,8 +177,10 @@ tuple_fallback, ) from mypy.types import ( + ANY_STRATEGY, OVERLOAD_NAMES, AnyType, + BoolTypeQuery, CallableType, DeletedType, ErasedType, @@ -196,7 +198,6 @@ TypedDictType, TypeGuardedType, TypeOfAny, - TypeQuery, TypeTranslator, TypeType, TypeVarId, @@ -7134,7 +7135,7 @@ def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: return not typ.accept(InvalidInferredTypes()) -class InvalidInferredTypes(TypeQuery[bool]): +class InvalidInferredTypes(BoolTypeQuery): """Find type components that are not valid for an inferred type. These include type, and any types resulting from failed @@ -7142,7 +7143,7 @@ class InvalidInferredTypes(TypeQuery[bool]): """ def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return t.ambiguous diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index b97c78cba2fc..65be472ccec7 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -5106,9 +5106,9 @@ def has_any_type(t: Type, ignore_in_type_obj: bool = False) -> bool: return t.accept(HasAnyType(ignore_in_type_obj)) -class HasAnyType(types.TypeQuery[bool]): +class HasAnyType(types.BoolTypeQuery): def __init__(self, ignore_in_type_obj: bool) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) self.ignore_in_type_obj = ignore_in_type_obj def visit_any(self, t: AnyType) -> bool: @@ -5185,7 +5185,7 @@ def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> Callabl return c.copy_modified(ret_type=new_ret_type) -class ArgInferSecondPassQuery(types.TypeQuery[bool]): +class ArgInferSecondPassQuery(types.BoolTypeQuery): """Query whether an argument type should be inferred in the second pass. 
The result is True if the type has a type variable in a callable return @@ -5194,17 +5194,17 @@ class ArgInferSecondPassQuery(types.TypeQuery[bool]): """ def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_callable_type(self, t: CallableType) -> bool: return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery()) -class HasTypeVarQuery(types.TypeQuery[bool]): +class HasTypeVarQuery(types.BoolTypeQuery): """Visitor for querying whether a type has a type variable component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_type_var(self, t: TypeVarType) -> bool: return True @@ -5214,11 +5214,11 @@ def has_erased_component(t: Type | None) -> bool: return t is not None and t.accept(HasErasedComponentsQuery()) -class HasErasedComponentsQuery(types.TypeQuery[bool]): +class HasErasedComponentsQuery(types.BoolTypeQuery): """Visitor for querying whether a type has an erased component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_erased_type(self, t: ErasedType) -> bool: return True @@ -5228,11 +5228,11 @@ def has_uninhabited_component(t: Type | None) -> bool: return t is not None and t.accept(HasUninhabitedComponentsQuery()) -class HasUninhabitedComponentsQuery(types.TypeQuery[bool]): +class HasUninhabitedComponentsQuery(types.BoolTypeQuery): """Visitor for querying whether a type has an UninhabitedType component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return True diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index ee9218f02b3e..e5be4aa55cd3 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -21,7 +21,7 @@ TypeInfo, ) from mypy.tvar_scope import TypeVarLikeScope -from mypy.type_visitor import TypeQuery +from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery from mypy.types import ( TPDICT_FB_NAMES, FunctionLike, @@ -319,9 +319,9 @@ def paramspec_kwargs( ) -class HasPlaceholders(TypeQuery[bool]): +class HasPlaceholders(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_placeholder_type(self, t: PlaceholderType) -> bool: return True diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 468b10fc9847..e4f56924d2d7 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -42,11 +42,13 @@ from mypy.tvar_scope import TypeVarLikeScope from mypy.types import ( ANNOTATED_TYPE_NAMES, + ANY_STRATEGY, FINAL_TYPE_NAMES, LITERAL_TYPE_NAMES, NEVER_NAMES, TYPE_ALIAS_NAMES, AnyType, + BoolTypeQuery, CallableArgument, CallableType, DeletedType, @@ -1944,9 +1946,9 @@ def has_any_from_unimported_type(t: Type) -> bool: return t.accept(HasAnyFromUnimportedType()) -class HasAnyFromUnimportedType(TypeQuery[bool]): +class HasAnyFromUnimportedType(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_any(self, t: AnyType) -> bool: return t.type_of_any == TypeOfAny.from_unimported_type @@ -2033,10 +2035,10 @@ def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) - return typ.accept(HasSelfType(lookup)) -class HasSelfType(TypeQuery[bool]): +class HasSelfType(BoolTypeQuery): def __init__(self, lookup: Callable[[str], SymbolTableNode | None]) -> None: self.lookup = lookup - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_unbound_type(self, t: UnboundType) -> 
bool: sym = self.lookup(t.name) diff --git a/mypy/types.py b/mypy/types.py index 383e5621060e..86a700d52469 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3305,9 +3305,9 @@ def replace_alias_tvars( return new_tp -class HasTypeVars(TypeQuery[bool]): +class HasTypeVars(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) self.skip_alias_target = True def visit_type_var(self, t: TypeVarType) -> bool: From 4e32da85bdae5b379711c6a1443a58e37d003037 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 21 Dec 2022 11:37:06 +0000 Subject: [PATCH 134/292] Refactor TypeState into a singleton class (#14327) This helps mypyc, since accessing mutable attributes of singleton instances is faster than accessing class variables. The implementation is also arguably a bit cleaner. This seems performance-neutral or a very minor optimization, but if we continue to add attributes to TypeState, this can help. --- mypy/build.py | 8 +-- mypy/checkexpr.py | 8 +-- mypy/constraints.py | 8 +-- mypy/mro.py | 4 +- mypy/server/astmerge.py | 4 +- mypy/server/aststrip.py | 4 +- mypy/server/deps.py | 6 +- mypy/server/update.py | 6 +- mypy/solve.py | 4 +- mypy/subtypes.py | 20 +++--- mypy/test/testdeps.py | 4 +- mypy/typestate.py | 141 ++++++++++++++++++++-------------------- 12 files changed, 108 insertions(+), 109 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index d2bcd572969e..2e0fa455554a 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -100,7 +100,7 @@ stub_package_name, ) from mypy.types import Type -from mypy.typestate import TypeState, reset_global_state +from mypy.typestate import reset_global_state, type_state from mypy.version import __version__ # Switch to True to produce debug output related to fine-grained incremental @@ -276,7 +276,7 @@ def _build( try: graph = dispatch(sources, manager, stdout) if not options.fine_grained_incremental: - TypeState.reset_all_subtype_caches() + type_state.reset_all_subtype_caches() if options.timing_stats is not None: dump_timing_stats(options.timing_stats, graph) if options.line_checking_stats is not None: @@ -2459,7 +2459,7 @@ def update_fine_grained_deps(self, deps: dict[str, set[str]]) -> None: from mypy.server.deps import merge_dependencies # Lazy import to speed up startup merge_dependencies(self.compute_fine_grained_deps(), deps) - TypeState.update_protocol_deps(deps) + type_state.update_protocol_deps(deps) def valid_references(self) -> set[str]: assert self.ancestors is not None @@ -2926,7 +2926,7 @@ def dispatch(sources: list[BuildSource], manager: BuildManager, stdout: TextIO) # then we need to collect fine grained protocol dependencies. # Since these are a global property of the program, they are calculated after we # processed the whole graph. - TypeState.add_all_protocol_deps(manager.fg_deps) + type_state.add_all_protocol_deps(manager.fg_deps) if not manager.options.fine_grained_incremental: rdeps = generate_deps_for_cache(manager, graph) write_deps_cache(rdeps, manager, graph) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 65be472ccec7..d839ad4925fd 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -162,7 +162,7 @@ is_self_type_like, remove_optional, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.typevars import fill_typevars from mypy.util import split_module_names from mypy.visitor import ExpressionVisitor @@ -1591,13 +1591,13 @@ def allow_unions(self, type_context: Type) -> Iterator[None]: # of joins. 
This is a bit arbitrary, but in practice it works for most # cases. A cleaner alternative would be to switch to single bin type # inference, but this is a lot of work. - old = TypeState.infer_unions + old = type_state.infer_unions if has_recursive_types(type_context): - TypeState.infer_unions = True + type_state.infer_unions = True try: yield finally: - TypeState.infer_unions = old + type_state.infer_unions = old def infer_arg_types_in_context( self, diff --git a/mypy/constraints.py b/mypy/constraints.py index 3a6553a307fd..fe3c1a19ff18 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -48,7 +48,7 @@ is_named_instance, is_union_with_any, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.typevartuples import ( extract_unpack, find_unpack_in_list, @@ -198,7 +198,7 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons if any( get_proper_type(template) == get_proper_type(t) and get_proper_type(actual) == get_proper_type(a) - for (t, a) in reversed(TypeState.inferring) + for (t, a) in reversed(type_state.inferring) ): return [] if has_recursive_types(template) or isinstance(get_proper_type(template), Instance): @@ -207,9 +207,9 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons if not has_type_vars(template): # Return early on an empty branch. return [] - TypeState.inferring.append((template, actual)) + type_state.inferring.append((template, actual)) res = _infer_constraints(template, actual, direction) - TypeState.inferring.pop() + type_state.inferring.pop() return res return _infer_constraints(template, actual, direction) diff --git a/mypy/mro.py b/mypy/mro.py index 912cf3e2e341..cc9f88a9d045 100644 --- a/mypy/mro.py +++ b/mypy/mro.py @@ -4,7 +4,7 @@ from mypy.nodes import TypeInfo from mypy.types import Instance -from mypy.typestate import TypeState +from mypy.typestate import type_state def calculate_mro(info: TypeInfo, obj_type: Callable[[], Instance] | None = None) -> None: @@ -17,7 +17,7 @@ def calculate_mro(info: TypeInfo, obj_type: Callable[[], Instance] | None = None info.mro = mro # The property of falling back to Any is inherited. info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in info.mro) - TypeState.reset_all_subtype_caches_for(info) + type_state.reset_all_subtype_caches_for(info) class MroError(Exception): diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 04422036b67b..6ce737c42520 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -110,7 +110,7 @@ UnionType, UnpackType, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import get_prefix, replace_object_state @@ -360,7 +360,7 @@ def fixup_and_reset_typeinfo(self, node: TypeInfo) -> TypeInfo: # The subclass relationships may change, so reset all caches relevant to the # old MRO. 
new = cast(TypeInfo, self.replacements[node]) - TypeState.reset_all_subtype_caches_for(new) + type_state.reset_all_subtype_caches_for(new) return self.fixup(node) def fixup_type(self, typ: Type | None) -> None: diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 83d90f31e8c4..b0666f8e1ff4 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -66,7 +66,7 @@ ) from mypy.traverser import TraverserVisitor from mypy.types import CallableType -from mypy.typestate import TypeState +from mypy.typestate import type_state SavedAttributes: _TypeAlias = Dict[Tuple[ClassDef, str], SymbolTableNode] @@ -143,7 +143,7 @@ def visit_class_def(self, node: ClassDef) -> None: super().visit_class_def(node) node.defs.body.extend(node.removed_statements) node.removed_statements = [] - TypeState.reset_subtype_caches_for(node.info) + type_state.reset_subtype_caches_for(node.info) # Kill the TypeInfo, since there is none before semantic analysis. node.info = CLASSDEF_NO_INFO node.analyzed = None diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 45d7947641da..eb40737061bf 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -172,7 +172,7 @@ class 'mod.Cls'. This can also refer to an attribute inherited from a UnpackType, get_proper_type, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import correct_relative_import @@ -344,7 +344,7 @@ def process_type_info(self, info: TypeInfo) -> None: self.add_dependency( make_wildcard_trigger(base_info.fullname), target=make_trigger(target) ) - # More protocol dependencies are collected in TypeState._snapshot_protocol_deps + # More protocol dependencies are collected in type_state._snapshot_protocol_deps # after a full run or update is finished. self.add_type_alias_deps(self.scope.current_target()) @@ -1123,7 +1123,7 @@ def dump_all_dependencies( deps = get_dependencies(node, type_map, python_version, options) for trigger, targets in deps.items(): all_deps.setdefault(trigger, set()).update(targets) - TypeState.add_all_protocol_deps(all_deps) + type_state.add_all_protocol_deps(all_deps) for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]): print(trigger) diff --git a/mypy/server/update.py b/mypy/server/update.py index e9750dec1e2a..9bea1998c0e5 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -157,7 +157,7 @@ from mypy.server.deps import get_dependencies_of_target, merge_dependencies from mypy.server.target import trigger_to_target from mypy.server.trigger import WILDCARD_TAG, make_trigger -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import module_prefix, split_target MAX_ITER: Final = 1000 @@ -869,7 +869,7 @@ def propagate_changes_using_dependencies( # We need to do this to avoid false negatives if the protocol itself is # unchanged, but was marked stale because its sub- (or super-) type changed. for info in stale_protos: - TypeState.reset_subtype_caches_for(info) + type_state.reset_subtype_caches_for(info) # Then fully reprocess all targets. # TODO: Preserve order (set is not optimal) for id, nodes in sorted(todo.items(), key=lambda x: x[0]): @@ -1081,7 +1081,7 @@ def update_deps( for trigger, targets in new_deps.items(): deps.setdefault(trigger, set()).update(targets) # Merge also the newly added protocol deps (if any). 
- TypeState.update_protocol_deps(deps) + type_state.update_protocol_deps(deps) def lookup_target( diff --git a/mypy/solve.py b/mypy/solve.py index c9c7db1ae26c..b8304d29c1ce 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -18,7 +18,7 @@ UnionType, get_proper_type, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state def solve_constraints( @@ -54,7 +54,7 @@ def solve_constraints( if bottom is None: bottom = c.target else: - if TypeState.infer_unions: + if type_state.infer_unions: # This deviates from the general mypy semantics because # recursive types are union-heavy in 95% of cases. bottom = UnionType.make_union([bottom, c.target]) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index b8d59977f986..994c4081addd 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -60,7 +60,7 @@ get_proper_type, is_named_instance, ) -from mypy.typestate import SubtypeKind, TypeState +from mypy.typestate import SubtypeKind, type_state from mypy.typevars import fill_typevars_with_any from mypy.typevartuples import extract_unpack, fully_split_with_mapped_and_template @@ -154,7 +154,7 @@ def is_subtype( options, } ), "Don't pass both context and individual flags" - if TypeState.is_assumed_subtype(left, right): + if type_state.is_assumed_subtype(left, right): return True if mypy.typeops.is_recursive_pair(left, right): # This case requires special care because it may cause infinite recursion. @@ -174,7 +174,7 @@ def is_subtype( # B = Union[int, Tuple[B, ...]] # When checking if A <: B we push pair (A, B) onto 'assuming' stack, then when after few # steps we come back to initial call is_subtype(A, B) and immediately return True. - with pop_on_exit(TypeState.get_assumptions(is_proper=False), left, right): + with pop_on_exit(type_state.get_assumptions(is_proper=False), left, right): return _is_subtype(left, right, subtype_context, proper_subtype=False) return _is_subtype(left, right, subtype_context, proper_subtype=False) @@ -215,11 +215,11 @@ def is_proper_subtype( ignore_uninhabited, } ), "Don't pass both context and individual flags" - if TypeState.is_assumed_proper_subtype(left, right): + if type_state.is_assumed_proper_subtype(left, right): return True if mypy.typeops.is_recursive_pair(left, right): # Same as for non-proper subtype, see detailed comment there for explanation. - with pop_on_exit(TypeState.get_assumptions(is_proper=True), left, right): + with pop_on_exit(type_state.get_assumptions(is_proper=True), left, right): return _is_subtype(left, right, subtype_context, proper_subtype=True) return _is_subtype(left, right, subtype_context, proper_subtype=True) @@ -445,14 +445,14 @@ def visit_instance(self, left: Instance) -> bool: if isinstance(right, TupleType) and mypy.typeops.tuple_fallback(right).type.is_enum: return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) if isinstance(right, Instance): - if TypeState.is_cached_subtype_check(self._subtype_kind, left, right): + if type_state.is_cached_subtype_check(self._subtype_kind, left, right): return True if not self.subtype_context.ignore_promotions: for base in left.type.mro: if base._promote and any( self._is_subtype(p, self.right) for p in base._promote ): - TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) + type_state.record_subtype_cache_entry(self._subtype_kind, left, right) return True # Special case: Low-level integer types are compatible with 'int'. 
We can't # use promotions, since 'int' is already promoted to low-level integer types, @@ -589,7 +589,7 @@ def check_mixed( ): nominal = False if nominal: - TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) + type_state.record_subtype_cache_entry(self._subtype_kind, left, right) return nominal if right.type.is_protocol and is_protocol_implementation( left, right, proper_subtype=self.proper_subtype @@ -978,7 +978,7 @@ def f(self) -> A: ... if skip is None: skip = [] # We need to record this check to generate protocol fine-grained dependencies. - TypeState.record_protocol_subtype_check(left.type, right.type) + type_state.record_protocol_subtype_check(left.type, right.type) # nominal subtyping currently ignores '__init__' and '__new__' signatures members_not_to_check = {"__init__", "__new__"} members_not_to_check.update(skip) @@ -1078,7 +1078,7 @@ def named_type(fullname: str) -> Instance: subtype_context=SubtypeContext(ignore_pos_arg_names=ignore_names), proper_subtype=proper_subtype, ) - TypeState.record_subtype_cache_entry(subtype_kind, left, right) + type_state.record_subtype_cache_entry(subtype_kind, left, right) return True diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py index ae1c613f7563..3343762cfaaf 100644 --- a/mypy/test/testdeps.py +++ b/mypy/test/testdeps.py @@ -15,7 +15,7 @@ from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options from mypy.types import Type -from mypy.typestate import TypeState +from mypy.typestate import type_state # Only dependencies in these modules are dumped dumped_modules = ["__main__", "pkg", "pkg.mod"] @@ -54,7 +54,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: for source in new_deps: deps[source].update(new_deps[source]) - TypeState.add_all_protocol_deps(deps) + type_state.add_all_protocol_deps(deps) for source, targets in sorted(deps.items()): if source.startswith((" {'p.P', ...} in the map. This map is flushed after every incremental # update. - _attempted_protocols: Final[dict[str, set[str]]] = {} + _attempted_protocols: Final[dict[str, set[str]]] # We also snapshot protocol members of the above protocols. For example, if we pass # a value of type a.A to a function expecting something compatible with Iterable, we'd have # 'a.A' -> {'__iter__', ...} in the map. This map is also flushed after every incremental # update. This map is needed to only generate dependencies like -> # instead of a wildcard to avoid unnecessarily invalidating classes. - _checked_against_members: Final[dict[str, set[str]]] = {} + _checked_against_members: Final[dict[str, set[str]]] # TypeInfos that appeared as a left type (subtype) in a subtype check since latest # dependency snapshot update. This is an optimisation for fine grained mode; during a full # run we only take a dependency snapshot at the very end, so this set will contain all @@ -75,74 +75,78 @@ class TypeState: # dependencies generated from (typically) few TypeInfos that were subtype-checked # (i.e. appeared as r.h.s. in an assignment or an argument in a function call in # a re-checked target) during the update. - _rechecked_types: Final[set[TypeInfo]] = set() + _rechecked_types: Final[set[TypeInfo]] # The two attributes below are assumption stacks for subtyping relationships between # recursive type aliases. 
Normally, one would pass type assumptions as an additional # arguments to is_subtype(), but this would mean updating dozens of related functions # threading this through all callsites (see also comment for TypeInfo.assuming). - _assuming: Final[list[tuple[Type, Type]]] = [] - _assuming_proper: Final[list[tuple[Type, Type]]] = [] + _assuming: Final[list[tuple[Type, Type]]] + _assuming_proper: Final[list[tuple[Type, Type]]] # Ditto for inference of generic constraints against recursive type aliases. - inferring: Final[list[tuple[Type, Type]]] = [] + inferring: Final[list[tuple[Type, Type]]] # Whether to use joins or unions when solving constraints, see checkexpr.py for details. - infer_unions: ClassVar = False + infer_unions: bool # N.B: We do all of the accesses to these properties through # TypeState, instead of making these classmethods and accessing # via the cls parameter, since mypyc can optimize accesses to # Final attributes of a directly referenced type. - @staticmethod - def is_assumed_subtype(left: Type, right: Type) -> bool: - for (l, r) in reversed(TypeState._assuming): + def __init__(self) -> None: + self._subtype_caches = {} + self.proto_deps = {} + self._attempted_protocols = {} + self._checked_against_members = {} + self._rechecked_types = set() + self._assuming = [] + self._assuming_proper = [] + self.inferring = [] + self.infer_unions = False + + def is_assumed_subtype(self, left: Type, right: Type) -> bool: + for (l, r) in reversed(self._assuming): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False - @staticmethod - def is_assumed_proper_subtype(left: Type, right: Type) -> bool: - for (l, r) in reversed(TypeState._assuming_proper): + def is_assumed_proper_subtype(self, left: Type, right: Type) -> bool: + for (l, r) in reversed(self._assuming_proper): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False - @staticmethod - def get_assumptions(is_proper: bool) -> list[tuple[Type, Type]]: + def get_assumptions(self, is_proper: bool) -> list[tuple[Type, Type]]: if is_proper: - return TypeState._assuming_proper - return TypeState._assuming + return self._assuming_proper + return self._assuming - @staticmethod - def reset_all_subtype_caches() -> None: + def reset_all_subtype_caches(self) -> None: """Completely reset all known subtype caches.""" - TypeState._subtype_caches.clear() + self._subtype_caches.clear() - @staticmethod - def reset_subtype_caches_for(info: TypeInfo) -> None: + def reset_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo.""" - if info in TypeState._subtype_caches: - TypeState._subtype_caches[info].clear() + if info in self._subtype_caches: + self._subtype_caches[info].clear() - @staticmethod - def reset_all_subtype_caches_for(info: TypeInfo) -> None: + def reset_all_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo and its MRO.""" for item in info.mro: - TypeState.reset_subtype_caches_for(item) + self.reset_subtype_caches_for(item) - @staticmethod - def is_cached_subtype_check(kind: SubtypeKind, left: Instance, right: Instance) -> bool: + def is_cached_subtype_check(self, kind: SubtypeKind, left: Instance, right: Instance) -> bool: if left.last_known_value is not None or right.last_known_value is not None: # If there is a literal last known value, give up. 
There # will be an unbounded number of potential types to cache, # making caching less effective. return False info = right.type - cache = TypeState._subtype_caches.get(info) + cache = self._subtype_caches.get(info) if cache is None: return False subcache = cache.get(kind) @@ -150,36 +154,32 @@ def is_cached_subtype_check(kind: SubtypeKind, left: Instance, right: Instance) return False return (left, right) in subcache - @staticmethod - def record_subtype_cache_entry(kind: SubtypeKind, left: Instance, right: Instance) -> None: + def record_subtype_cache_entry( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> None: if left.last_known_value is not None or right.last_known_value is not None: # These are unlikely to match, due to the large space of # possible values. Avoid uselessly increasing cache sizes. return - cache = TypeState._subtype_caches.setdefault(right.type, dict()) + cache = self._subtype_caches.setdefault(right.type, dict()) cache.setdefault(kind, set()).add((left, right)) - @staticmethod - def reset_protocol_deps() -> None: + def reset_protocol_deps(self) -> None: """Reset dependencies after a full run or before a daemon shutdown.""" - TypeState.proto_deps = {} - TypeState._attempted_protocols.clear() - TypeState._checked_against_members.clear() - TypeState._rechecked_types.clear() + self.proto_deps = {} + self._attempted_protocols.clear() + self._checked_against_members.clear() + self._rechecked_types.clear() - @staticmethod - def record_protocol_subtype_check(left_type: TypeInfo, right_type: TypeInfo) -> None: + def record_protocol_subtype_check(self, left_type: TypeInfo, right_type: TypeInfo) -> None: assert right_type.is_protocol - TypeState._rechecked_types.add(left_type) - TypeState._attempted_protocols.setdefault(left_type.fullname, set()).add( - right_type.fullname - ) - TypeState._checked_against_members.setdefault(left_type.fullname, set()).update( + self._rechecked_types.add(left_type) + self._attempted_protocols.setdefault(left_type.fullname, set()).add(right_type.fullname) + self._checked_against_members.setdefault(left_type.fullname, set()).update( right_type.protocol_members ) - @staticmethod - def _snapshot_protocol_deps() -> dict[str, set[str]]: + def _snapshot_protocol_deps(self) -> dict[str, set[str]]: """Collect protocol attribute dependencies found so far from registered subtype checks. There are three kinds of protocol dependencies. For example, after a subtype check: @@ -209,8 +209,8 @@ def __iter__(self) -> Iterator[int]: 'subtypes.is_protocol_implementation'). """ deps: dict[str, set[str]] = {} - for info in TypeState._rechecked_types: - for attr in TypeState._checked_against_members[info.fullname]: + for info in self._rechecked_types: + for attr in self._checked_against_members[info.fullname]: # The need for full MRO here is subtle, during an update, base classes of # a concrete class may not be reprocessed, so not all -> deps # are added. 
@@ -220,7 +220,7 @@ def __iter__(self) -> Iterator[int]: # TODO: avoid everything from typeshed continue deps.setdefault(trigger, set()).add(make_trigger(info.fullname)) - for proto in TypeState._attempted_protocols[info.fullname]: + for proto in self._attempted_protocols[info.fullname]: trigger = make_trigger(info.fullname) if "typing" in trigger or "builtins" in trigger: continue @@ -233,46 +233,45 @@ def __iter__(self) -> Iterator[int]: deps.setdefault(trigger, set()).add(proto) return deps - @staticmethod - def update_protocol_deps(second_map: dict[str, set[str]] | None = None) -> None: + def update_protocol_deps(self, second_map: dict[str, set[str]] | None = None) -> None: """Update global protocol dependency map. We update the global map incrementally, using a snapshot only from recently type checked types. If second_map is given, update it as well. This is currently used by FineGrainedBuildManager that maintains normal (non-protocol) dependencies. """ - assert ( - TypeState.proto_deps is not None - ), "This should not be called after failed cache load" - new_deps = TypeState._snapshot_protocol_deps() + assert self.proto_deps is not None, "This should not be called after failed cache load" + new_deps = self._snapshot_protocol_deps() for trigger, targets in new_deps.items(): - TypeState.proto_deps.setdefault(trigger, set()).update(targets) + self.proto_deps.setdefault(trigger, set()).update(targets) if second_map is not None: for trigger, targets in new_deps.items(): second_map.setdefault(trigger, set()).update(targets) - TypeState._rechecked_types.clear() - TypeState._attempted_protocols.clear() - TypeState._checked_against_members.clear() + self._rechecked_types.clear() + self._attempted_protocols.clear() + self._checked_against_members.clear() - @staticmethod - def add_all_protocol_deps(deps: dict[str, set[str]]) -> None: + def add_all_protocol_deps(self, deps: dict[str, set[str]]) -> None: """Add all known protocol dependencies to deps. This is used by tests and debug output, and also when collecting all collected or loaded dependencies as part of build. """ - TypeState.update_protocol_deps() # just in case - if TypeState.proto_deps is not None: - for trigger, targets in TypeState.proto_deps.items(): + self.update_protocol_deps() # just in case + if self.proto_deps is not None: + for trigger, targets in self.proto_deps.items(): deps.setdefault(trigger, set()).update(targets) +type_state: Final = TypeState() + + def reset_global_state() -> None: """Reset most existing global state. Currently most of it is in this module. Few exceptions are strict optional status and and functools.lru_cache. """ - TypeState.reset_all_subtype_caches() - TypeState.reset_protocol_deps() + type_state.reset_all_subtype_caches() + type_state.reset_protocol_deps() TypeVarId.next_raw_id = 1 From 87335997808207a138125a2f52fb8f4dc5a69fa3 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 21 Dec 2022 12:56:20 +0000 Subject: [PATCH 135/292] Avoid the use of a context manager in hot code path (#14331) Mypyc can't optimize context managers yet, so it's best to avoid them in hot code paths. This sacrifices some code quality for a considerable perf gain. This improved self-check performance by 4%. 
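For context, a rough sketch of the general pattern being applied (the names below are illustrative, not the actual mypy code): a context manager that saves and restores a flag is replaced by having the hot caller save and restore the flag itself, so the compiled code uses plain attribute accesses instead of generator machinery.

```
from contextlib import contextmanager
from typing import Iterator


class State:
    flag = False


state = State()


@contextmanager
def enable_flag() -> Iterator[None]:
    # Readable, but mypyc currently compiles this via generator machinery.
    old = state.flag
    state.flag = True
    try:
        yield
    finally:
        state.flag = old


def hot_path() -> None:
    # Manual save/restore: cheaper when compiled, at some cost in clarity.
    old = state.flag
    state.flag = True
    # ... do the actual work ...
    state.flag = old
```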
--- mypy/checkexpr.py | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index d839ad4925fd..c990e9b59f98 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1583,21 +1583,21 @@ def infer_arg_types_in_empty_context(self, args: list[Expression]) -> list[Type] res.append(arg_type) return res - @contextmanager - def allow_unions(self, type_context: Type) -> Iterator[None]: - # This is a hack to better support inference for recursive types. - # When the outer context for a function call is known to be recursive, - # we solve type constraints inferred from arguments using unions instead - # of joins. This is a bit arbitrary, but in practice it works for most - # cases. A cleaner alternative would be to switch to single bin type - # inference, but this is a lot of work. + def infer_more_unions_for_recursive_type(self, type_context: Type) -> bool: + """Adjust type inference of unions if type context has a recursive type. + + Return the old state. The caller must assign it to type_state.infer_unions + afterwards. + + This is a hack to better support inference for recursive types. + + Note: This is performance-sensitive and must not be a context manager + until mypyc supports them better. + """ old = type_state.infer_unions if has_recursive_types(type_context): type_state.infer_unions = True - try: - yield - finally: - type_state.infer_unions = old + return old def infer_arg_types_in_context( self, @@ -1618,8 +1618,16 @@ def infer_arg_types_in_context( for i, actuals in enumerate(formal_to_actual): for ai in actuals: if not arg_kinds[ai].is_star(): - with self.allow_unions(callee.arg_types[i]): - res[ai] = self.accept(args[ai], callee.arg_types[i]) + arg_type = callee.arg_types[i] + # When the outer context for a function call is known to be recursive, + # we solve type constraints inferred from arguments using unions instead + # of joins. This is a bit arbitrary, but in practice it works for most + # cases. A cleaner alternative would be to switch to single bin type + # inference, but this is a lot of work. + old = self.infer_more_unions_for_recursive_type(arg_type) + res[ai] = self.accept(args[ai], arg_type) + # We need to manually restore union inference state, ugh. + type_state.infer_unions = old # Fill in the rest of the argument types. for i, t in enumerate(res): From 1a7823ecaad94d8da11b626de90c37972c00c43e Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 21 Dec 2022 14:13:16 +0000 Subject: [PATCH 136/292] Tool to compare performance of any number of mypy commits/branches (#14332) The script compiles some mypy commits in parallel and then measures how long each version takes to self-check a specific mypy commit. It measures the performance 15 times for each commit and takes the average. Based on some experiments, the noise floor on my Linux desktop is about 0.5% to 1.0%. Any difference above 1.0% is likely significant, I believe. For differences between 0.5% and 1.0% it makes sense to repeat the measurement a few times. The interesting part of the output looks something like this: ``` ... 
=== Results === 145d8a41b17ab1ba8707589be9cb5d56bbebd0ea 8.207s (0.0%) 145d8a41b17ab1ba8707589be9cb5d56bbebd0ea~1 8.105s (-1.2%) ``` Co-authored-by: Alex Waygood --- misc/perf_compare.py | 146 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 misc/perf_compare.py diff --git a/misc/perf_compare.py b/misc/perf_compare.py new file mode 100644 index 000000000000..be05bb6ddc32 --- /dev/null +++ b/misc/perf_compare.py @@ -0,0 +1,146 @@ +"""Compare performance of mypyc-compiled mypy between one or more commits/branches. + +Simple usage: + + python misc/perf_compare.py my-branch master ... + +What this does: + + * Create a temp clone of the mypy repo for each target commit to measure + * Checkout a target commit in each of the clones + * Compile mypyc in each of the clones *in parallel* + * Create another temp clone of the mypy repo as the code to check + * Self check with each of the compiled mypys N times + * Report the average runtimes and relative performance + * Remove the temp clones +""" + +from __future__ import annotations + +import argparse +import glob +import os +import random +import shutil +import statistics +import subprocess +import sys +import threading +import time + + +def heading(s: str) -> None: + print() + print(f"=== {s} ===") + print() + + +def build_mypy(target_dir: str) -> None: + env = os.environ.copy() + env["CC"] = "clang" + env["MYPYC_OPT_LEVEL"] = "2" + cmd = [sys.executable, "setup.py", "--use-mypyc", "build_ext", "--inplace"] + subprocess.run(cmd, env=env, check=True, cwd=target_dir) + + +def clone(target_dir: str, commit: str | None) -> None: + heading(f"Cloning mypy to {target_dir}") + repo_dir = os.getcwd() + if os.path.isdir(target_dir): + print(f"{target_dir} exists: deleting") + shutil.rmtree(target_dir) + subprocess.run(["git", "clone", repo_dir, target_dir], check=True) + if commit: + subprocess.run(["git", "checkout", commit], check=True, cwd=target_dir) + + +def run_benchmark(compiled_dir: str, check_dir: str) -> float: + cache_dir = os.path.join(compiled_dir, ".mypy_cache") + if os.path.isdir(cache_dir): + shutil.rmtree(cache_dir) + env = os.environ.copy() + env["PYTHONPATH"] = os.path.abspath(compiled_dir) + abschk = os.path.abspath(check_dir) + cmd = [ + sys.executable, + "-m", + "mypy", + "--config-file", + os.path.join(abschk, "mypy_self_check.ini"), + ] + cmd += glob.glob(os.path.join(abschk, "mypy/*.py")) + cmd += glob.glob(os.path.join(abschk, "mypy/*/*.py")) + t0 = time.time() + # Ignore errors, since some commits being measured may generate additional errors. 
+ subprocess.run(cmd, cwd=compiled_dir, env=env) + return time.time() - t0 + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument("commit", nargs="+") + args = parser.parse_args() + commits = args.commit + num_runs = 16 + + if not (os.path.isdir(".git") and os.path.isdir("mypyc")): + sys.exit("error: Run this the mypy repo root") + + build_threads = [] + target_dirs = [] + for i, commit in enumerate(commits): + target_dir = f"mypy.{i}.tmpdir" + target_dirs.append(target_dir) + clone(target_dir, commit) + t = threading.Thread(target=lambda: build_mypy(target_dir)) + t.start() + build_threads.append(t) + + self_check_dir = "mypy.self.tmpdir" + clone(self_check_dir, commits[0]) + + heading("Compiling mypy") + print("(This will take a while...)") + + for t in build_threads: + t.join() + + print(f"Finished compiling mypy ({len(commits)} builds)") + + heading("Performing measurements") + + results: dict[str, list[float]] = {} + for n in range(num_runs): + if n == 0: + print("Warmup...") + else: + print(f"Run {n}/{num_runs - 1}...") + items = list(enumerate(commits)) + random.shuffle(items) + for i, commit in items: + tt = run_benchmark(target_dirs[i], self_check_dir) + # Don't record the first warm-up run + if n > 0: + print(f"{commit}: t={tt:.3f}s") + results.setdefault(commit, []).append(tt) + + print() + heading("Results") + first = -1.0 + for commit in commits: + tt = statistics.mean(results[commit]) + if first < 0: + delta = "0.0%" + first = tt + else: + d = (tt / first) - 1 + delta = f"{d:+.1%}" + print(f"{commit:<25} {tt:.3f}s ({delta})") + + shutil.rmtree(self_check_dir) + for target_dir in target_dirs: + shutil.rmtree(target_dir) + + +if __name__ == "__main__": + main() From 25146105416863c4237380354b7f15102448b324 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Wed, 21 Dec 2022 14:55:34 +0000 Subject: [PATCH 137/292] Fix docs build (#14334) The docs build is currently failing on `master`: see e.g. https://github.com/python/mypy/actions/runs/3748461486/jobs/6365827914. It looks like it was broken by the release of `attrs` 22.2.0 earlier today. Specifically, it looks like https://github.com/python-attrs/attrs/commit/1bb28648248e1bed63c1c6b077e7fe4b8260efc8 broke mypy's docs build. I think this fixes things. --- docs/source/additional_features.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst index 19e0d4dcce01..ef5bf9e8936d 100644 --- a/docs/source/additional_features.rst +++ b/docs/source/additional_features.rst @@ -177,7 +177,7 @@ Caveats/Known Issues will complain about not understanding the argument and the type annotation in :py:meth:`__init__ ` will be replaced by ``Any``. -* :ref:`Validator decorators ` +* :ref:`Validator decorators ` and `default decorators `_ are not type-checked against the attribute they are setting/validating. From 31b041344eab4b84971924cdcb45ba06dffe6d6c Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 22 Dec 2022 23:27:49 +0000 Subject: [PATCH 138/292] stubtest: Improve heuristics for determining whether global-namespace names are imported (#14270) Stubtest currently has both false-positives and false-negatives when it comes to verifying constants in the global namespace of a module. This PR fixes the false positive by using `inspect.getsourcelines()` to dynamically retrieve the module source code. It then uses `symtable` to analyse that source code to gather a list of names which are known to be imported. 
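For reference, a small self-contained sketch of that symtable-based step (simplified relative to the code added in this PR):

```
import symtable

source = "import re\nfrom collections import OrderedDict\n\nCONSTANT = re.compile('foo')\n"

table = symtable.symtable(source, "example", "exec")
imported = {sym.get_name() for sym in table.get_symbols() if sym.is_imported()}
print(imported)  # {'re', 'OrderedDict'} -- CONSTANT is not flagged (set order may vary)
```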
The PR fixes the false negative by only using the `__module__` heuristic on objects which are callable. The vast majority of callable objects will be types or functions. For these objects, the `__module__` attribute will give a good indication of whether the object originates from another module or not; for other objects, it's less useful. --- mypy/stubtest.py | 50 ++++++++++++++++++++++++++++++++++----- mypy/test/teststubtest.py | 3 +++ 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index a7a72235fed1..5946324d4619 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -15,6 +15,7 @@ import os import pkgutil import re +import symtable import sys import traceback import types @@ -283,6 +284,36 @@ def _verify_exported_names( ) +def _get_imported_symbol_names(runtime: types.ModuleType) -> frozenset[str] | None: + """Retrieve the names in the global namespace which are known to be imported. + + 1). Use inspect to retrieve the source code of the module + 2). Use symtable to parse the source and retrieve names that are known to be imported + from other modules. + + If either of the above steps fails, return `None`. + + Note that if a set of names is returned, + it won't include names imported via `from foo import *` imports. + """ + try: + source = inspect.getsource(runtime) + except (OSError, TypeError, SyntaxError): + return None + + if not source.strip(): + # The source code for the module was an empty file, + # no point in parsing it with symtable + return frozenset() + + try: + module_symtable = symtable.symtable(source, runtime.__name__, "exec") + except SyntaxError: + return None + + return frozenset(sym.get_name() for sym in module_symtable.get_symbols() if sym.is_imported()) + + @verify.register(nodes.MypyFile) def verify_mypyfile( stub: nodes.MypyFile, runtime: MaybeMissing[types.ModuleType], object_path: list[str] @@ -312,15 +343,22 @@ def verify_mypyfile( if not o.module_hidden and (not is_probably_private(m) or hasattr(runtime, m)) } + imported_symbols = _get_imported_symbol_names(runtime) + def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: obj = getattr(r, attr) - try: - obj_mod = getattr(obj, "__module__", None) - except Exception: + if isinstance(obj, types.ModuleType): return False - if obj_mod is not None: - return bool(obj_mod == r.__name__) - return not isinstance(obj, types.ModuleType) + if callable(obj): + try: + obj_mod = getattr(obj, "__module__", None) + except Exception: + return False + if obj_mod is not None: + return bool(obj_mod == r.__name__) + if imported_symbols is not None: + return attr not in imported_symbols + return True runtime_public_contents = ( runtime_all_as_set diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 812333e3feb4..5e59d8efec63 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -1082,6 +1082,9 @@ def test_missing_no_runtime_all(self) -> Iterator[Case]: yield Case(stub="", runtime="import sys", error=None) yield Case(stub="", runtime="def g(): ...", error="g") yield Case(stub="", runtime="CONSTANT = 0", error="CONSTANT") + yield Case(stub="", runtime="import re; constant = re.compile('foo')", error="constant") + yield Case(stub="", runtime="from json.scanner import NUMBER_RE", error=None) + yield Case(stub="", runtime="from string import ascii_letters", error=None) @collect_cases def test_non_public_1(self) -> Iterator[Case]: From c246a527008c28cf4970b28b85f7ccb5ce6ba285 Mon Sep 17 00:00:00 2001 From: Shantanu 
<12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 25 Dec 2022 14:38:25 -0600 Subject: [PATCH 139/292] Improve searchability for module level type ignore errors (#14342) --- docs/source/common_issues.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index 42962581702f..465035307d5d 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -188,6 +188,8 @@ Ignoring a whole file A ``# type: ignore`` comment at the top of a module (before any statements, including imports or docstrings) has the effect of ignoring the entire contents of the module. +This behaviour can be surprising and result in +"Module ... has no attribute ... [attr-defined]" errors. To only ignore errors, use a top-level ``# mypy: ignore-errors`` comment instead. To only ignore errors with a specific error code, use a top-level From 01a1bf6bf4615e548b195b657756f9c57f8631a4 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 26 Dec 2022 10:29:08 +0000 Subject: [PATCH 140/292] Speed up the implementation of hasattr() checks (#14333) This makes the implementation of hasattr() checks faster (introduced in #13544). In particular, since the `extra_attrs` attribute used for hasattr() checks is usually None, I micro-optimized the codepaths to avoid expensive operations whenever there are no hasattr() checks. Also avoid expensive operations on simple unions and order `isinstance` checks so that common types are checked first. I measured a 2% performance uplift in self-check. --- mypy/meet.py | 2 +- mypy/typeops.py | 42 ++++++++++++++++++++++++++++-------------- 2 files changed, 29 insertions(+), 15 deletions(-) diff --git a/mypy/meet.py b/mypy/meet.py index 5c187eeb37d4..8760b8c6d4fe 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -76,7 +76,7 @@ def meet_types(s: Type, t: Type) -> ProperType: # Code in checker.py should merge any extra_items where possible, so we # should have only compatible extra_items here. We check this before # the below subtype check, so that extra_attrs will not get erased. - if is_same_type(s, t) and (s.extra_attrs or t.extra_attrs): + if (s.extra_attrs or t.extra_attrs) and is_same_type(s, t): if s.extra_attrs and t.extra_attrs: if len(s.extra_attrs.attrs) > len(t.extra_attrs.attrs): # Return the one that has more precise information. diff --git a/mypy/typeops.py b/mypy/typeops.py index 9f224e02c088..baf5b8552eff 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -33,6 +33,7 @@ ENUM_REMOVED_PROPS, AnyType, CallableType, + ExtraAttrs, FormalArgument, FunctionLike, Instance, @@ -466,16 +467,27 @@ def make_simplified_union( result = get_proper_type(UnionType.make_union(simplified_set, line, column)) - # Step 4: At last, we erase any (inconsistent) extra attributes on instances. - extra_attrs_set = set() - for item in items: - instance = try_getting_instance_fallback(item) - if instance and instance.extra_attrs: - extra_attrs_set.add(instance.extra_attrs) - - fallback = try_getting_instance_fallback(result) - if len(extra_attrs_set) > 1 and fallback: - fallback.extra_attrs = None + nitems = len(items) + if nitems > 1 and ( + nitems > 2 or not (type(items[0]) is NoneType or type(items[1]) is NoneType) + ): + # Step 4: At last, we erase any (inconsistent) extra attributes on instances. + + # Initialize with None instead of an empty set as a micro-optimization. The set + # is needed very rarely, so we try to avoid constructing it. 
+ extra_attrs_set: set[ExtraAttrs] | None = None + for item in items: + instance = try_getting_instance_fallback(item) + if instance and instance.extra_attrs: + if extra_attrs_set is None: + extra_attrs_set = {instance.extra_attrs} + else: + extra_attrs_set.add(instance.extra_attrs) + + if extra_attrs_set is not None and len(extra_attrs_set) > 1: + fallback = try_getting_instance_fallback(result) + if fallback: + fallback.extra_attrs = None return result @@ -1006,13 +1018,15 @@ def try_getting_instance_fallback(typ: Type) -> Instance | None: typ = get_proper_type(typ) if isinstance(typ, Instance): return typ - elif isinstance(typ, TupleType): - return typ.partial_fallback - elif isinstance(typ, TypedDictType): + elif isinstance(typ, LiteralType): return typ.fallback + elif isinstance(typ, NoneType): + return None # Fast path for None, which is common elif isinstance(typ, FunctionLike): return typ.fallback - elif isinstance(typ, LiteralType): + elif isinstance(typ, TupleType): + return typ.partial_fallback + elif isinstance(typ, TypedDictType): return typ.fallback elif isinstance(typ, TypeVarType): return try_getting_instance_fallback(typ.upper_bound) From 5349f9a6e3ea53a7def2e895cfb3bc3938880052 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 26 Dec 2022 10:29:51 +0000 Subject: [PATCH 141/292] Avoid slow error message logic if errors not shown to user (#14336) This helps with await-related errors introduced in #12958, in particular, which are expensive to generate. If errors are ignored (e.g. in third-party libraries) or we don't care about the error message, use simpler error message logic. We also often filter out error messages temporarily, so any effort in constructing a nice error message is wasted. We could skip even more logic, but this should cover many of the important code paths. This speeds up self check by about 2%. 
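As a rough illustration of the pattern (hypothetical, simplified names rather than mypy's real API): when the caller knows the error will be filtered or ignored, it returns a cheap generic message instead of doing the expensive formatting work.

```
def format_arg_error(actual: object, expected: object, prefer_simple: bool) -> str:
    if prefer_simple:
        # Message will never be shown; skip pretty-printing and note generation.
        return "Argument has incompatible type"
    return (
        f"Argument has incompatible type {type(actual).__name__!r}; "
        f"expected {type(expected).__name__!r}"
    )


print(format_arg_error(1, "x", prefer_simple=True))
print(format_arg_error(1, "x", prefer_simple=False))
```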
--- mypy/checker.py | 4 ++ mypy/checkexpr.py | 3 +- mypy/checkmember.py | 5 +- mypy/errors.py | 18 ++++++ mypy/messages.py | 133 ++++++++++++++++++++++++++------------------ 5 files changed, 106 insertions(+), 57 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 119ce6ae6338..7c5fcba1bb09 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -5936,6 +5936,10 @@ def check_subtype( if isinstance(msg, str): msg = ErrorMessage(msg, code=code) + if self.msg.prefer_simple_messages(): + self.fail(msg, context) # Fast path -- skip all fancy logic + return False + orig_subtype = subtype subtype = get_proper_type(subtype) orig_supertype = supertype diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index c990e9b59f98..5993639be406 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2157,7 +2157,8 @@ def check_arg( self.msg.incompatible_argument_note( original_caller_type, callee_type, context, code=code ) - self.chk.check_possible_missing_await(caller_type, callee_type, context) + if not self.msg.prefer_simple_messages(): + self.chk.check_possible_missing_await(caller_type, callee_type, context) def check_overload_call( self, diff --git a/mypy/checkmember.py b/mypy/checkmember.py index e3c7c95a64b9..918ce7520454 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -272,8 +272,9 @@ def report_missing_attribute( override_info: TypeInfo | None = None, ) -> Type: res_type = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table) - if may_be_awaitable_attribute(name, typ, mx, override_info): - mx.msg.possible_missing_await(mx.context) + if not mx.msg.prefer_simple_messages(): + if may_be_awaitable_attribute(name, typ, mx, override_info): + mx.msg.possible_missing_await(mx.context) return res_type diff --git a/mypy/errors.py b/mypy/errors.py index bfc44a858010..d1e13ad701fc 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -737,6 +737,24 @@ def is_errors_for_file(self, file: str) -> bool: """Are there any errors for the given file?""" return file in self.error_info_map + def prefer_simple_messages(self) -> bool: + """Should we generate simple/fast error messages? + + Return True if errors are not shown to user, i.e. errors are ignored + or they are collected for internal use only. + + If True, we should prefer to generate a simple message quickly. + All normal errors should still be reported. + """ + if self.file in self.ignored_files: + # Errors ignored, so no point generating fancy messages + return True + for _watcher in self._watchers: + if _watcher._filter is True and _watcher._filtered is None: + # Errors are filtered + return True + return False + def raise_error(self, use_stdout: bool = True) -> NoReturn: """Raise a CompileError with the generated messages. diff --git a/mypy/messages.py b/mypy/messages.py index 85811561e176..b8c04fe2b8e9 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -191,6 +191,14 @@ def disable_type_names(self) -> Iterator[None]: def are_type_names_disabled(self) -> bool: return len(self._disable_type_names) > 0 and self._disable_type_names[-1] + def prefer_simple_messages(self) -> bool: + """Should we generate simple/fast error messages? + + If errors aren't shown to the user, we don't want to waste cyles producing + complex error messages. 
+ """ + return self.errors.prefer_simple_messages() + def report( self, msg: str, @@ -685,64 +693,69 @@ def incompatible_argument( actual_type_str, expected_type_str ) else: - try: - expected_type = callee.arg_types[m - 1] - except IndexError: # Varargs callees - expected_type = callee.arg_types[-1] - arg_type_str, expected_type_str = format_type_distinctly( - arg_type, expected_type, bare=True - ) - if arg_kind == ARG_STAR: - arg_type_str = "*" + arg_type_str - elif arg_kind == ARG_STAR2: - arg_type_str = "**" + arg_type_str - - # For function calls with keyword arguments, display the argument name rather than the - # number. - arg_label = str(n) - if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: - arg_name = outer_context.arg_names[n - 1] - if arg_name is not None: - arg_label = f'"{arg_name}"' - if ( - arg_kind == ARG_STAR2 - and isinstance(arg_type, TypedDictType) - and m <= len(callee.arg_names) - and callee.arg_names[m - 1] is not None - and callee.arg_kinds[m - 1] != ARG_STAR2 - ): - arg_name = callee.arg_names[m - 1] - assert arg_name is not None - arg_type_str, expected_type_str = format_type_distinctly( - arg_type.items[arg_name], expected_type, bare=True - ) - arg_label = f'"{arg_name}"' - if isinstance(outer_context, IndexExpr) and isinstance(outer_context.index, StrExpr): - msg = 'Value of "{}" has incompatible type {}; expected {}'.format( - outer_context.index.value, - quote_type_string(arg_type_str), - quote_type_string(expected_type_str), - ) + if self.prefer_simple_messages(): + msg = "Argument has incompatible type" else: - msg = "Argument {} {}has incompatible type {}; expected {}".format( - arg_label, - target, - quote_type_string(arg_type_str), - quote_type_string(expected_type_str), + try: + expected_type = callee.arg_types[m - 1] + except IndexError: # Varargs callees + expected_type = callee.arg_types[-1] + arg_type_str, expected_type_str = format_type_distinctly( + arg_type, expected_type, bare=True ) + if arg_kind == ARG_STAR: + arg_type_str = "*" + arg_type_str + elif arg_kind == ARG_STAR2: + arg_type_str = "**" + arg_type_str + + # For function calls with keyword arguments, display the argument name rather + # than the number. 
+ arg_label = str(n) + if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: + arg_name = outer_context.arg_names[n - 1] + if arg_name is not None: + arg_label = f'"{arg_name}"' + if ( + arg_kind == ARG_STAR2 + and isinstance(arg_type, TypedDictType) + and m <= len(callee.arg_names) + and callee.arg_names[m - 1] is not None + and callee.arg_kinds[m - 1] != ARG_STAR2 + ): + arg_name = callee.arg_names[m - 1] + assert arg_name is not None + arg_type_str, expected_type_str = format_type_distinctly( + arg_type.items[arg_name], expected_type, bare=True + ) + arg_label = f'"{arg_name}"' + if isinstance(outer_context, IndexExpr) and isinstance( + outer_context.index, StrExpr + ): + msg = 'Value of "{}" has incompatible type {}; expected {}'.format( + outer_context.index.value, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + else: + msg = "Argument {} {}has incompatible type {}; expected {}".format( + arg_label, + target, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + expected_type = get_proper_type(expected_type) + if isinstance(expected_type, UnionType): + expected_types = list(expected_type.items) + else: + expected_types = [expected_type] + for type in get_proper_types(expected_types): + if isinstance(arg_type, Instance) and isinstance(type, Instance): + notes = append_invariance_notes(notes, arg_type, type) object_type = get_proper_type(object_type) if isinstance(object_type, TypedDictType): code = codes.TYPEDDICT_ITEM else: code = codes.ARG_TYPE - expected_type = get_proper_type(expected_type) - if isinstance(expected_type, UnionType): - expected_types = list(expected_type.items) - else: - expected_types = [expected_type] - for type in get_proper_types(expected_types): - if isinstance(arg_type, Instance) and isinstance(type, Instance): - notes = append_invariance_notes(notes, arg_type, type) self.fail(msg, context, code=code) if notes: for note_msg in notes: @@ -756,6 +769,8 @@ def incompatible_argument_note( context: Context, code: ErrorCode | None, ) -> None: + if self.prefer_simple_messages(): + return if isinstance( original_caller_type, (Instance, TupleType, TypedDictType, TypeType, CallableType) ): @@ -832,7 +847,9 @@ def invalid_index_type( def too_few_arguments( self, callee: CallableType, context: Context, argument_names: Sequence[str | None] | None ) -> None: - if argument_names is not None: + if self.prefer_simple_messages(): + msg = "Too few arguments" + elif argument_names is not None: num_positional_args = sum(k is None for k in argument_names) arguments_left = callee.arg_names[num_positional_args : callee.min_args] diff = [k for k in arguments_left if k not in argument_names] @@ -856,7 +873,10 @@ def missing_named_argument(self, callee: CallableType, context: Context, name: s self.fail(msg, context, code=codes.CALL_ARG) def too_many_arguments(self, callee: CallableType, context: Context) -> None: - msg = "Too many arguments" + for_function(callee) + if self.prefer_simple_messages(): + msg = "Too many arguments" + else: + msg = "Too many arguments" + for_function(callee) self.fail(msg, context, code=codes.CALL_ARG) self.maybe_note_about_special_args(callee, context) @@ -874,11 +894,16 @@ def too_many_arguments_from_typed_dict( self.fail(msg, context) def too_many_positional_arguments(self, callee: CallableType, context: Context) -> None: - msg = "Too many positional arguments" + for_function(callee) + if self.prefer_simple_messages(): + msg = "Too many positional arguments" + else: + msg 
= "Too many positional arguments" + for_function(callee) self.fail(msg, context) self.maybe_note_about_special_args(callee, context) def maybe_note_about_special_args(self, callee: CallableType, context: Context) -> None: + if self.prefer_simple_messages(): + return # https://github.com/python/mypy/issues/11309 first_arg = callee.def_extras.get("first_arg") if first_arg and first_arg not in {"self", "cls", "mcs"}: From 632304f90923ba38483d94de3cf58d486966f4f0 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Dec 2022 23:24:21 -0500 Subject: [PATCH 142/292] mypy_primer_comment: Resolve node.js 12 actions deprecation warning (#14352) Resolves the following warning: ![image](https://user-images.githubusercontent.com/1350584/209748053-dc33f93c-5c34-4cd2-863f-6e54207d9b4f.png) By using https://github.com/kanga333/comment-hider/releases/tag/v0.4.0 --- .github/workflows/mypy_primer_comment.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml index b20eaf471c9a..12ce91c12910 100644 --- a/.github/workflows/mypy_primer_comment.yml +++ b/.github/workflows/mypy_primer_comment.yml @@ -90,8 +90,8 @@ jobs: return prNumber - name: Hide old comments - # v0.3.0 - uses: kanga333/comment-hider@bbdf5b562fbec24e6f60572d8f712017428b92e0 + # v0.4.0 + uses: kanga333/comment-hider@c12bb20b48aeb8fc098e35967de8d4f8018fffdf with: github_token: ${{ secrets.GITHUB_TOKEN }} leave_visible: 1 From 61a21baf7e37069b829f19bd74416c01f1b84404 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 28 Dec 2022 13:23:42 +0000 Subject: [PATCH 143/292] Micro-optimization: avoid Bogus[int] types that cause needless boxing (#14354) I want to get rid of all the bogus types eventually. --- mypy/types.py | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index 86a700d52469..480af0fa852b 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -153,6 +153,9 @@ # A placeholder used for Bogus[...] 
parameters _dummy: Final[Any] = object() +# A placeholder for int parameters +_dummy_int: Final = -999999 + class TypeOfAny: """ @@ -540,8 +543,8 @@ def copy_modified( values: Bogus[list[Type]] = _dummy, upper_bound: Bogus[Type] = _dummy, id: Bogus[TypeVarId | int] = _dummy, - line: Bogus[int] = _dummy, - column: Bogus[int] = _dummy, + line: int = _dummy_int, + column: int = _dummy_int, ) -> TypeVarType: return TypeVarType( self.name, @@ -550,8 +553,8 @@ def copy_modified( self.values if values is _dummy else values, self.upper_bound if upper_bound is _dummy else upper_bound, self.variance, - self.line if line is _dummy else line, - self.column if column is _dummy else column, + self.line if line == _dummy_int else line, + self.column if column == _dummy_int else column, ) def accept(self, visitor: TypeVisitor[T]) -> T: @@ -658,14 +661,14 @@ def copy_modified( self, *, id: Bogus[TypeVarId | int] = _dummy, - flavor: Bogus[int] = _dummy, + flavor: int = _dummy_int, prefix: Bogus[Parameters] = _dummy, ) -> ParamSpecType: return ParamSpecType( self.name, self.fullname, id if id is not _dummy else self.id, - flavor if flavor is not _dummy else self.flavor, + flavor if flavor != _dummy_int else self.flavor, self.upper_bound, line=self.line, column=self.column, @@ -1024,10 +1027,10 @@ def accept(self, visitor: TypeVisitor[T]) -> T: def copy_modified( self, # Mark with Bogus because _dummy is just an object (with type Any) - type_of_any: Bogus[int] = _dummy, + type_of_any: int = _dummy_int, original_any: Bogus[AnyType | None] = _dummy, ) -> AnyType: - if type_of_any is _dummy: + if type_of_any == _dummy_int: type_of_any = self.type_of_any if original_any is _dummy: original_any = self.source_any @@ -1745,8 +1748,8 @@ def copy_modified( name: Bogus[str | None] = _dummy, definition: Bogus[SymbolNode] = _dummy, variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, - line: Bogus[int] = _dummy, - column: Bogus[int] = _dummy, + line: int = _dummy_int, + column: int = _dummy_int, is_ellipsis_args: Bogus[bool] = _dummy, implicit: Bogus[bool] = _dummy, special_sig: Bogus[str | None] = _dummy, @@ -1766,8 +1769,8 @@ def copy_modified( name=name if name is not _dummy else self.name, definition=definition if definition is not _dummy else self.definition, variables=variables if variables is not _dummy else self.variables, - line=line if line is not _dummy else self.line, - column=column if column is not _dummy else self.column, + line=line if line != _dummy_int else self.line, + column=column if column != _dummy_int else self.column, is_ellipsis_args=( is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args ), From 1f8621c483baf4641c317ad755f814bcb9e41296 Mon Sep 17 00:00:00 2001 From: Hugues Date: Wed, 28 Dec 2022 05:58:38 -0800 Subject: [PATCH 144/292] subtypes: fast path for Union/Union subtype check (#14277) Enums are exploded into Union of Literal when narrowed. Conditional branches on enum values can result in multiple distinct narrowing of the same enum which are later subject to subtype checks (most notably via `is_same_type`, when exiting frame context in the binder). Such checks would have quadratic complexity: `O(N*M)` where `N` and `M` are the number of entries in each narrowed enum variable, and led to drastic slowdown if any of the enums involved has a large number of values. Implement a linear-time fast path where literals are quickly filtered, with a fallback to the slow path for more complex values. 
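For illustration, the shape of the fast path looks roughly like this (a
simplified sketch in which plain int/str values stand in for mypy's
literal types; this is not the actual implementation):

    def union_subtype(left_items, right_items, slow_is_subtype) -> bool:
        # Hash the right-hand items once; simple "literal-like" items get an
        # O(1) membership test instead of an O(M) scan per left-hand item.
        fast_check = {item for item in right_items if isinstance(item, (int, str))}
        for item in left_items:
            if item in fast_check:
                continue
            # Fall back to the slow pairwise check for anything more complex.
            if not slow_is_subtype(item, right_items):
                return False
        return True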
In our codebase there is one method with a chain of a dozen `if`
statements operating on instances of an enum with hundreds of values.
Prior to the regression it was typechecked in less than 1s. After the
regression it takes over 13min to typecheck. This patch fully fixes the
regression for us.

Fixes #13821.

---
 mypy/subtypes.py | 30 ++++++++++++++++++++++++++++++
 mypy/types.py    |  9 +++++++++
 2 files changed, 39 insertions(+)

diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 994c4081addd..61ba7af5147f 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -57,6 +57,7 @@
     UninhabitedType,
     UnionType,
     UnpackType,
+    _flattened,
     get_proper_type,
     is_named_instance,
 )
@@ -891,6 +892,35 @@ def visit_union_type(self, left: UnionType) -> bool:
                 if not self._is_subtype(item, self.orig_right):
                     return False
             return True
+
+        elif isinstance(self.right, UnionType):
+            # prune literals early to avoid nasty quadratic behavior which would otherwise arise when checking
+            # subtype relationships between slightly different narrowings of an Enum
+            # we achieve O(N+M) instead of O(N*M)
+
+            fast_check: set[ProperType] = set()
+
+            for item in _flattened(self.right.relevant_items()):
+                p_item = get_proper_type(item)
+                if isinstance(p_item, LiteralType):
+                    fast_check.add(p_item)
+                elif isinstance(p_item, Instance):
+                    if p_item.last_known_value is None:
+                        fast_check.add(p_item)
+                    else:
+                        fast_check.add(p_item.last_known_value)
+
+            for item in left.relevant_items():
+                p_item = get_proper_type(item)
+                if p_item in fast_check:
+                    continue
+                lit_type = mypy.typeops.simple_literal_type(p_item)
+                if lit_type in fast_check:
+                    continue
+                if not self._is_subtype(item, self.orig_right):
+                    return False
+            return True
+
         return all(self._is_subtype(item, self.orig_right) for item in left.items)
 
     def visit_partial_type(self, left: PartialType) -> bool:
diff --git a/mypy/types.py b/mypy/types.py
index 480af0fa852b..83c5b88032a3 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3346,6 +3346,15 @@ def has_recursive_types(typ: Type) -> bool:
     return typ.accept(_has_recursive_type)
 
 
+def _flattened(types: Iterable[Type]) -> Iterable[Type]:
+    for t in types:
+        tp = get_proper_type(t)
+        if isinstance(tp, UnionType):
+            yield from _flattened(tp.items)
+        else:
+            yield t
+
+
 def flatten_nested_unions(
     types: Iterable[Type], handle_type_alias_type: bool = True
 ) -> list[Type]:

From 109c8ce84685136d1cd89c86d4b3458272a6144f Mon Sep 17 00:00:00 2001
From: Stas Ilinskiy
Date: Wed, 28 Dec 2022 06:01:14 -0800
Subject: [PATCH 145/292] [undefined vars] fix per-module error code override bug (#14351)

This one would occur because we set the errors module with global
options, instead of per-module override ones. It only mattered for
checks that happened after the partially undefined checks, which (I
believe) is only the unused `type: ignore` checks.

This was discovered when updating tests for #14166. I've also cleaned
up the function signature a little.
--- mypy/build.py | 10 +++++----- mypy/server/update.py | 2 +- test-data/unit/check-flags.test | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 2e0fa455554a..1747c4518c63 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2347,19 +2347,19 @@ def type_check_second_pass(self) -> bool: self.time_spent_us += time_spent_us(t0) return result - def detect_possibly_undefined_vars(self, type_map: dict[Expression, Type]) -> None: + def detect_possibly_undefined_vars(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" if self.tree.is_stub: # We skip stub files because they aren't actually executed. return manager = self.manager + manager.errors.set_file(self.xpath, self.tree.fullname, options=self.options) if manager.errors.is_error_code_enabled( codes.POSSIBLY_UNDEFINED ) or manager.errors.is_error_code_enabled(codes.USED_BEFORE_DEF): - manager.errors.set_file(self.xpath, self.tree.fullname, options=manager.options) self.tree.accept( PossiblyUndefinedVariableVisitor( - MessageBuilder(manager.errors, manager.modules), type_map, manager.options + MessageBuilder(manager.errors, manager.modules), self.type_map(), self.options ) ) @@ -3418,7 +3418,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No graph[id].type_check_first_pass() if not graph[id].type_checker().deferred_nodes: unfinished_modules.discard(id) - graph[id].detect_possibly_undefined_vars(graph[id].type_map()) + graph[id].detect_possibly_undefined_vars() graph[id].finish_passes() while unfinished_modules: @@ -3427,7 +3427,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No continue if not graph[id].type_check_second_pass(): unfinished_modules.discard(id) - graph[id].detect_possibly_undefined_vars(graph[id].type_map()) + graph[id].detect_possibly_undefined_vars() graph[id].finish_passes() for id in stale: graph[id].generate_unused_ignore_notes() diff --git a/mypy/server/update.py b/mypy/server/update.py index 9bea1998c0e5..83cce22873a1 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -662,7 +662,7 @@ def restore(ids: list[str]) -> None: state.type_checker().reset() state.type_check_first_pass() state.type_check_second_pass() - state.detect_possibly_undefined_vars(state.type_map()) + state.detect_possibly_undefined_vars() t2 = time.time() state.finish_passes() t3 = time.time() diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 33723b7fee76..a76463e3106b 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -2110,7 +2110,7 @@ if foo: ... # E: Function "Callable[[], int]" could always be true in boolean c 42 + "no" # type: ignore [file mypy.ini] \[mypy] -enable_error_code = ignore-without-code, truthy-bool +enable_error_code = ignore-without-code, truthy-bool, used-before-def \[mypy-tests.*] disable_error_code = ignore-without-code From 20e9733e32ddff6ecbaddb7be61ae36b6c4978d0 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Wed, 28 Dec 2022 06:16:31 -0800 Subject: [PATCH 146/292] [undefined vars] do not double report errors in class defs (#14350) These errors are already reported by the (new) semantic analyzer. I've discovered this while updating unit tests for new semanal in #14166. Tests are included. 
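For example, a class body like this (intentionally invalid code, along
the lines of the new test cases below) used to produce two errors for
the same line:

    class A:
        y = x + 1  # the semantic analyzer already reports: Name "x" is not defined
        x = 0

With this change only the semantic analyzer error is reported, since the
used-before-def check now skips class scopes.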
--- mypy/partially_defined.py | 49 +++++++++++++------- test-data/unit/check-possibly-undefined.test | 27 +++++++++++ 2 files changed, 60 insertions(+), 16 deletions(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 644a47248615..c63c62c3e393 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -1,5 +1,7 @@ from __future__ import annotations +from enum import Enum + from mypy import checker, errorcodes from mypy.messages import MessageBuilder from mypy.nodes import ( @@ -164,13 +166,21 @@ def done(self) -> BranchState: ) +class ScopeType(Enum): + Global = 1 + Class = 2 + Func = 3 + Generator = 3 + + class Scope: - def __init__(self, stmts: list[BranchStatement]) -> None: + def __init__(self, stmts: list[BranchStatement], scope_type: ScopeType) -> None: self.branch_stmts: list[BranchStatement] = stmts + self.scope_type = scope_type self.undefined_refs: dict[str, set[NameExpr]] = {} def copy(self) -> Scope: - result = Scope([s.copy() for s in self.branch_stmts]) + result = Scope([s.copy() for s in self.branch_stmts], self.scope_type) result.undefined_refs = self.undefined_refs.copy() return result @@ -188,7 +198,7 @@ class DefinedVariableTracker: def __init__(self) -> None: # There's always at least one scope. Within each scope, there's at least one "global" BranchingStatement. - self.scopes: list[Scope] = [Scope([BranchStatement(BranchState())])] + self.scopes: list[Scope] = [Scope([BranchStatement(BranchState())], ScopeType.Global)] # disable_branch_skip is used to disable skipping a branch due to a return/raise/etc. This is useful # in things like try/except/finally statements. self.disable_branch_skip = False @@ -203,13 +213,18 @@ def _scope(self) -> Scope: assert len(self.scopes) > 0 return self.scopes[-1] - def enter_scope(self) -> None: + def enter_scope(self, scope_type: ScopeType) -> None: assert len(self._scope().branch_stmts) > 0 - self.scopes.append(Scope([BranchStatement(self._scope().branch_stmts[-1].branches[-1])])) + self.scopes.append( + Scope([BranchStatement(self._scope().branch_stmts[-1].branches[-1])], scope_type) + ) def exit_scope(self) -> None: self.scopes.pop() + def in_scope(self, scope_type: ScopeType) -> bool: + return self._scope().scope_type == scope_type + def start_branch_statement(self) -> None: assert len(self._scope().branch_stmts) > 0 self._scope().branch_stmts.append( @@ -320,12 +335,14 @@ def variable_may_be_undefined(self, name: str, context: Context) -> None: def process_definition(self, name: str) -> None: # Was this name previously used? If yes, it's a used-before-definition error. - refs = self.tracker.pop_undefined_ref(name) - for ref in refs: - if self.loops: - self.variable_may_be_undefined(name, ref) - else: - self.var_used_before_def(name, ref) + if not self.tracker.in_scope(ScopeType.Class): + # Errors in class scopes are caught by the semantic analyzer. 
+ refs = self.tracker.pop_undefined_ref(name) + for ref in refs: + if self.loops: + self.variable_may_be_undefined(name, ref) + else: + self.var_used_before_def(name, ref) self.tracker.record_definition(name) def visit_global_decl(self, o: GlobalDecl) -> None: @@ -392,7 +409,7 @@ def visit_match_stmt(self, o: MatchStmt) -> None: def visit_func_def(self, o: FuncDef) -> None: self.process_definition(o.name) - self.tracker.enter_scope() + self.tracker.enter_scope(ScopeType.Func) super().visit_func_def(o) self.tracker.exit_scope() @@ -405,14 +422,14 @@ def visit_func(self, o: FuncItem) -> None: super().visit_func(o) def visit_generator_expr(self, o: GeneratorExpr) -> None: - self.tracker.enter_scope() + self.tracker.enter_scope(ScopeType.Generator) for idx in o.indices: self.process_lvalue(idx) super().visit_generator_expr(o) self.tracker.exit_scope() def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: - self.tracker.enter_scope() + self.tracker.enter_scope(ScopeType.Generator) for idx in o.indices: self.process_lvalue(idx) super().visit_dictionary_comprehension(o) @@ -446,7 +463,7 @@ def visit_return_stmt(self, o: ReturnStmt) -> None: self.tracker.skip_branch() def visit_lambda_expr(self, o: LambdaExpr) -> None: - self.tracker.enter_scope() + self.tracker.enter_scope(ScopeType.Func) super().visit_lambda_expr(o) self.tracker.exit_scope() @@ -613,7 +630,7 @@ def visit_with_stmt(self, o: WithStmt) -> None: def visit_class_def(self, o: ClassDef) -> None: self.process_definition(o.name) - self.tracker.enter_scope() + self.tracker.enter_scope(ScopeType.Class) super().visit_class_def(o) self.tracker.exit_scope() diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test index aa535a1ce081..802635c30b35 100644 --- a/test-data/unit/check-possibly-undefined.test +++ b/test-data/unit/check-possibly-undefined.test @@ -931,3 +931,30 @@ def f(): x = 0 z = y # E: Name "y" is used before definition y: int = x # E: Name "x" may be undefined + +[case testClassBody] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +class A: + # The following should not only trigger an error from semantic analyzer, but not the used-before-def check. + y = x + 1 # E: Name "x" is not defined + x = 0 + # Same as above but in a loop, which should trigger a possibly-undefined error. + for _ in [1, 2, 3]: + b = a + 1 # E: Name "a" is not defined + a = 0 + + +class B: + if int(): + x = 0 + else: + # This type of check is not caught by the semantic analyzer. If we ever update it to catch such issues, + # we should make sure that errors are not double-reported. + y = x # E: Name "x" is used before definition + for _ in [1, 2, 3]: + if int(): + a = 0 + else: + # Same as above but in a loop. + b = a # E: Name "a" may be undefined From 45bed9c994b1082a050e26958c1847b53ad82357 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 28 Dec 2022 14:23:35 +0000 Subject: [PATCH 147/292] Optimization: Remove expensive context manager in type analyzer (#14357) This makes mypy a bit faster and the implementation seems a little cleaner as well. (Various small optimizations, including this, together netted a 6% performance improvement in self check.) 
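As a sketch of the underlying pattern (simplified stand-in code, not the
actual TypeAnalyser), the flag is now threaded through and saved/restored
inline instead of being toggled via a @contextmanager helper, which
allocates a generator object on every call:

    from contextlib import contextmanager

    class Analyzer:
        def __init__(self) -> None:
            self.allow_literals = False

        def anal_type(self, t: str) -> str:
            # Stand-in for the real per-type analysis.
            return t

        # Before (sketch): every use allocates a generator via @contextmanager.
        @contextmanager
        def set_allow_literals(self, value: bool):
            old = self.allow_literals
            self.allow_literals = value
            try:
                yield
            finally:
                self.allow_literals = old

        # After (sketch): plain attribute save/restore, cheap when compiled.
        def anal_array(self, args: list, allow_literals: bool = False) -> list:
            old = self.allow_literals
            self.allow_literals = allow_literals
            res = [self.anal_type(t) for t in args]
            self.allow_literals = old
            return res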
--- mypy/typeanal.py | 68 +++++++++++++++++++++++++++--------------------- 1 file changed, 39 insertions(+), 29 deletions(-) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index e4f56924d2d7..28f293613d50 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -268,10 +268,13 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.api.record_incomplete_ref() # Always allow ParamSpec for placeholders, if they are actually not valid, # they will be reported later, after we resolve placeholders. - with self.set_allow_param_spec_literals(True): - return PlaceholderType( - node.fullname, self.anal_array(t.args, allow_param_spec=True), t.line - ) + return PlaceholderType( + node.fullname, + self.anal_array( + t.args, allow_param_spec=True, allow_param_spec_literals=True + ), + t.line, + ) else: if self.api.final_iteration: self.cannot_resolve_type(t) @@ -382,10 +385,13 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) return special if isinstance(node, TypeAlias): self.aliases_used.add(fullname) - with self.set_allow_param_spec_literals(node.has_param_spec_type): - an_args = self.anal_array(t.args, allow_param_spec=True) - if node.has_param_spec_type and len(node.alias_tvars) == 1: - an_args = self.pack_paramspec_args(an_args) + an_args = self.anal_array( + t.args, + allow_param_spec=True, + allow_param_spec_literals=node.has_param_spec_type, + ) + if node.has_param_spec_type and len(node.alias_tvars) == 1: + an_args = self.pack_paramspec_args(an_args) disallow_any = self.options.disallow_any_generics and not self.is_typeshed_stub res = expand_type_alias( @@ -660,17 +666,22 @@ def analyze_type_with_type_info( fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line) return TupleType(self.anal_array(args), fallback, ctx.line) - # This is a heuristic: it will be checked later anyways but the error - # message may be worse. - with self.set_allow_param_spec_literals(info.has_param_spec_type): - # Analyze arguments and (usually) construct Instance type. The - # number of type arguments and their values are - # checked only later, since we do not always know the - # valid count at this point. Thus we may construct an - # Instance with an invalid number of type arguments. - instance = Instance( - info, self.anal_array(args, allow_param_spec=True), ctx.line, ctx.column - ) + # Analyze arguments and (usually) construct Instance type. The + # number of type arguments and their values are + # checked only later, since we do not always know the + # valid count at this point. Thus we may construct an + # Instance with an invalid number of type arguments. + # + # We allow ParamSpec literals based on a heuristic: it will be + # checked later anyways but the error message may be worse. 
+ instance = Instance( + info, + self.anal_array( + args, allow_param_spec=True, allow_param_spec_literals=info.has_param_spec_type + ), + ctx.line, + ctx.column, + ) if len(info.type_vars) == 1 and info.has_param_spec_type: instance.args = tuple(self.pack_paramspec_args(instance.args)) @@ -1466,11 +1477,19 @@ def is_defined_type_var(self, tvar: str, context: Context) -> bool: return self.tvar_scope.get_binding(tvar_node) is not None def anal_array( - self, a: Iterable[Type], nested: bool = True, *, allow_param_spec: bool = False + self, + a: Iterable[Type], + nested: bool = True, + *, + allow_param_spec: bool = False, + allow_param_spec_literals: bool = False, ) -> list[Type]: + old_allow_param_spec_literals = self.allow_param_spec_literals + self.allow_param_spec_literals = allow_param_spec_literals res: list[Type] = [] for t in a: res.append(self.anal_type(t, nested, allow_param_spec=allow_param_spec)) + self.allow_param_spec_literals = old_allow_param_spec_literals return self.check_unpacks_in_list(res) def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = False) -> Type: @@ -1558,15 +1577,6 @@ def tuple_type(self, items: list[Type]) -> TupleType: any_type = AnyType(TypeOfAny.special_form) return TupleType(items, fallback=self.named_type("builtins.tuple", [any_type])) - @contextmanager - def set_allow_param_spec_literals(self, to: bool) -> Iterator[None]: - old = self.allow_param_spec_literals - try: - self.allow_param_spec_literals = to - yield - finally: - self.allow_param_spec_literals = old - TypeVarLikeList = List[Tuple[str, TypeVarLikeExpr]] From ec4404ae331170f6d033f442906a547dec319e02 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 28 Dec 2022 14:24:07 +0000 Subject: [PATCH 148/292] Optimization: Enable always defined attributes in Type subclasses (#14356) Use lazy initialization to avoid method calls in `__init__`. This allows mypyc to infer more always defined attributes. (Various small optimizations, including this, together netted a 6% performance improvement in self check.) --- mypy/types.py | 35 ++++++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index 83c5b88032a3..b48280466c3e 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -201,7 +201,7 @@ def deserialize_type(data: JsonDict | str) -> Type: class Type(mypy.nodes.Context): """Abstract base class for all types.""" - __slots__ = ("can_be_true", "can_be_false") + __slots__ = ("_can_be_true", "_can_be_false") # 'can_be_true' and 'can_be_false' mean whether the value of the # expression can be true or false in a boolean context. They are useful # when inferring the type of logic expressions like `x and y`. 
@@ -214,8 +214,29 @@ class Type(mypy.nodes.Context): def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) - self.can_be_true = self.can_be_true_default() - self.can_be_false = self.can_be_false_default() + # Value of these can be -1 (use the default, lazy init), 0 (false) or 1 (true) + self._can_be_true = -1 + self._can_be_false = -1 + + @property + def can_be_true(self) -> bool: + if self._can_be_true == -1: # Lazy init helps mypyc + self._can_be_true = self.can_be_true_default() + return bool(self._can_be_true) + + @can_be_true.setter + def can_be_true(self, v: bool) -> None: + self._can_be_true = v + + @property + def can_be_false(self) -> bool: + if self._can_be_false == -1: # Lazy init helps mypyc + self._can_be_false = self.can_be_false_default() + return bool(self._can_be_false) + + @can_be_false.setter + def can_be_false(self, v: bool) -> None: + self._can_be_false = v def can_be_true_default(self) -> bool: return True @@ -264,10 +285,10 @@ def __init__( line: int = -1, column: int = -1, ) -> None: + super().__init__(line, column) self.alias = alias self.args = args self.type_ref: str | None = None - super().__init__(line, column) def _expand_once(self) -> Type: """Expand to the target type exactly once. @@ -1424,7 +1445,7 @@ class FunctionLike(ProperType): def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) - self.can_be_false = False + self._can_be_false = False @abstractmethod def is_type_obj(self) -> bool: @@ -2183,10 +2204,10 @@ def __init__( column: int = -1, implicit: bool = False, ) -> None: + super().__init__(line, column) self.partial_fallback = fallback self.items = items self.implicit = implicit - super().__init__(line, column) def can_be_true_default(self) -> bool: if self.can_be_any_bool(): @@ -2495,8 +2516,8 @@ class LiteralType(ProperType): def __init__( self, value: LiteralValue, fallback: Instance, line: int = -1, column: int = -1 ) -> None: - self.value = value super().__init__(line, column) + self.value = value self.fallback = fallback self._hash = -1 # Cached hash value From c29a414695be922bdcbdf6d1df9881ce7f28ccbf Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 28 Dec 2022 15:30:07 +0000 Subject: [PATCH 149/292] Require setuptools>=65.5.1 (#14355) Address dependabot alert about security vulnerability (https://nvd.nist.gov/vuln/detail/CVE-2022-40897). --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 8ae94237f5ea..76255044e2dd 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -15,5 +15,5 @@ pytest-xdist>=1.34.0 pytest-forked>=1.3.0,<2.0.0 pytest-cov>=2.10.0 py>=1.5.2 -setuptools!=50 +setuptools>=65.5.1 six From 8e7e22001646034586523a239056f657980451eb Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 28 Dec 2022 15:38:15 +0000 Subject: [PATCH 150/292] Optimization: Avoid a few uses of contextmanagers in semantic analyzer (#14360) This helps mypyc. (Various small optimizations, including this, together netted a 6% performance improvement in self check.) 
--- mypy/semanal.py | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 916009702830..eceb96ca63ee 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -50,7 +50,7 @@ from __future__ import annotations -from contextlib import contextmanager, nullcontext +from contextlib import contextmanager from typing import Any, Callable, Collection, Iterable, Iterator, List, TypeVar, cast from typing_extensions import Final, TypeAlias as _TypeAlias @@ -2645,11 +2645,15 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: # But we can't use a full visit because it may emit extra incomplete refs (namely # when analysing any type applications there) thus preventing the further analysis. # To break the tie, we first analyse rvalue partially, if it can be a type alias. - with self.basic_type_applications_set(s): - with self.allow_unbound_tvars_set() if self.can_possibly_be_index_alias( - s - ) else nullcontext(): + if self.can_possibly_be_index_alias(s): + old_basic_type_applications = self.basic_type_applications + self.basic_type_applications = True + with self.allow_unbound_tvars_set(): s.rvalue.accept(self) + self.basic_type_applications = old_basic_type_applications + else: + s.rvalue.accept(self) + if self.found_incomplete_ref(tag) or self.should_wait_rhs(s.rvalue): # Initializer couldn't be fully analyzed. Defer the current node and give up. # Make sure that if we skip the definition of some local names, they can't be @@ -2819,17 +2823,6 @@ def can_possibly_be_index_alias(self, s: AssignmentStmt) -> bool: # Something that looks like Foo = Bar[Baz, ...] return True - @contextmanager - def basic_type_applications_set(self, s: AssignmentStmt) -> Iterator[None]: - old = self.basic_type_applications - # As an optimization, only use the double visit logic if this - # can possibly be a recursive type alias. - self.basic_type_applications = self.can_possibly_be_index_alias(s) - try: - yield - finally: - self.basic_type_applications = old - def is_type_ref(self, rv: Expression, bare: bool = False) -> bool: """Does this expression refer to a type? From 86dad8ada970a7e37433b74c27019db29f607abc Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 28 Dec 2022 15:41:47 +0000 Subject: [PATCH 151/292] [mypyc] Simplify union types (#14363) We can sometimes simplify a mypyc RType union, even if the mypy union couldn't be simplified. A typical example is `list[x] | list[y]` which can be simplified to just `list`. Previously this would generate a redundant union `union[list, list]`. 
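As a rough sketch of the dedup step (a hypothetical helper for
illustration, with plain strings standing in for mypyc RTypes): once the
union items are mapped to their runtime representations, dropping
duplicates while preserving order collapses the union.

    def simplify_union_items(items: list):
        # Hypothetical helper; strings stand in for RType values.
        seen = set()
        result = []
        for item in items:  # keep the original item order
            if item not in seen:
                result.append(item)
                seen.add(item)
        return result if len(result) > 1 else result[0]

    # simplify_union_items(["list", "list"])        -> "list"  (e.g. List[str] | List[bytes])
    # simplify_union_items(["list", "list", "int"]) -> ["list", "int"]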
--- mypyc/irbuild/mapper.py | 13 +++++++++- mypyc/test-data/irbuild-lists.test | 37 +++++++++++++++++++++++++++ mypyc/test-data/irbuild-optional.test | 14 ++++------ 3 files changed, 54 insertions(+), 10 deletions(-) diff --git a/mypyc/irbuild/mapper.py b/mypyc/irbuild/mapper.py index 4364b2b6c511..a108766644ce 100644 --- a/mypyc/irbuild/mapper.py +++ b/mypyc/irbuild/mapper.py @@ -116,7 +116,18 @@ def type_to_rtype(self, typ: Type | None) -> RType: elif isinstance(typ, NoneTyp): return none_rprimitive elif isinstance(typ, UnionType): - return RUnion([self.type_to_rtype(item) for item in typ.items]) + # Remove redundant items using set + list to preserve item order + seen = set() + items = [] + for item in typ.items: + rtype = self.type_to_rtype(item) + if rtype not in seen: + items.append(rtype) + seen.add(rtype) + if len(items) > 1: + return RUnion(items) + else: + return items[0] elif isinstance(typ, AnyType): return object_rprimitive elif isinstance(typ, TypeType): diff --git a/mypyc/test-data/irbuild-lists.test b/mypyc/test-data/irbuild-lists.test index 47f7ada709e3..b82217465fef 100644 --- a/mypyc/test-data/irbuild-lists.test +++ b/mypyc/test-data/irbuild-lists.test @@ -428,3 +428,40 @@ L4: L5: res = r8 return 1 + +[case testSimplifyListUnion] +from typing import List, Union + +def f(a: Union[List[str], List[bytes], int]) -> int: + if isinstance(a, list): + return len(a) + return a +[out] +def f(a): + a :: union[list, int] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: list + r5 :: ptr + r6 :: native_int + r7 :: short_int + r8 :: int +L0: + r0 = load_address PyList_Type + r1 = PyObject_IsInstance(a, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = borrow cast(list, a) + r5 = get_element_ptr r4 ob_size :: PyVarObject + r6 = load_mem r5 :: native_int* + keep_alive r4 + r7 = r6 << 1 + keep_alive a + return r7 +L2: + r8 = unbox(int, a) + return r8 diff --git a/mypyc/test-data/irbuild-optional.test b/mypyc/test-data/irbuild-optional.test index 4b1d3d1ffec2..e98cf1b19e2e 100644 --- a/mypyc/test-data/irbuild-optional.test +++ b/mypyc/test-data/irbuild-optional.test @@ -527,14 +527,10 @@ class B: [out] def f(o): - o :: union[object, object] - r0 :: object - r1 :: str - r2, r3 :: object + o :: object + r0 :: str + r1 :: object L0: - r0 = o - r1 = 'x' - r2 = CPyObject_GetAttr(r0, r1) - r3 = r2 -L1: + r0 = 'x' + r1 = CPyObject_GetAttr(o, r0) return 1 From 9dc624bec2e76810548651d72fcfe4619bdb4a78 Mon Sep 17 00:00:00 2001 From: Joshua Bronson Date: Wed, 28 Dec 2022 19:05:29 -0500 Subject: [PATCH 152/292] [mypyc] Improve error message for multiple inheritance. (#14344) Ref: mypyc/mypyc#962 --- mypyc/irbuild/classdef.py | 2 +- mypyc/test-data/commandline.test | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 34fc1fd766b0..b1f2ed1a1a65 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -84,7 +84,7 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: # classes aren't necessarily populated yet at # prepare_class_def time. 
if any(ir.base_mro[i].base != ir.base_mro[i + 1] for i in range(len(ir.base_mro) - 1)): - builder.error("Non-trait MRO must be linear", cdef.line) + builder.error("Multiple inheritance is not supported (except for traits)", cdef.line) if ir.allow_interpreted_subclasses: for parent in ir.mro: diff --git a/mypyc/test-data/commandline.test b/mypyc/test-data/commandline.test index 6612df9e1886..bc2713a20f7d 100644 --- a/mypyc/test-data/commandline.test +++ b/mypyc/test-data/commandline.test @@ -164,7 +164,7 @@ class Trait2(Concrete2): class NonExt(Concrete1): # E: Non-extension classes may not inherit from extension classes pass -class Nope(Trait1, Concrete2): # E: Non-trait bases must appear first in parent list # E: Non-trait MRO must be linear +class Nope(Trait1, Concrete2): # E: Non-trait bases must appear first in parent list # E: Multiple inheritance is not supported (except for traits) pass @decorator From 5e817cd808b5d1ecd6f2d93c20b6e6fd1de94514 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 29 Dec 2022 01:17:11 +0000 Subject: [PATCH 153/292] Stubtest: clean up the `_belongs_to_runtime` function (#14361) There's a small semantic change in this PR (instead of returning `False` if trying to access the `__module__` attribute raises an exception, we now just move on to the next heuristic). But the main purpose of this PR is to make the code more readable, as this function was getting quite hard to understand. Co-authored-by: hauntsaninja --- mypy/stubtest.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 5946324d4619..0e4c36e684c1 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -346,16 +346,20 @@ def verify_mypyfile( imported_symbols = _get_imported_symbol_names(runtime) def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: + """Heuristics to determine whether a name originates from another module.""" obj = getattr(r, attr) if isinstance(obj, types.ModuleType): return False if callable(obj): + # It's highly likely to be a class or a function if it's callable, + # so the __module__ attribute will give a good indication of which module it comes from try: - obj_mod = getattr(obj, "__module__", None) + obj_mod = obj.__module__ except Exception: - return False - if obj_mod is not None: - return bool(obj_mod == r.__name__) + pass + else: + if isinstance(obj_mod, str): + return bool(obj_mod == r.__name__) if imported_symbols is not None: return attr not in imported_symbols return True @@ -367,8 +371,9 @@ def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: m for m in dir(runtime) if not is_probably_private(m) - # Ensure that the object's module is `runtime`, since in the absence of __all__ we - # don't have a good way to detect re-exports at runtime. + # Filter out objects that originate from other modules (best effort). 
Note that in the + # absence of __all__, we don't have a way to detect explicit / intentional re-exports + # at runtime and _belongs_to_runtime(runtime, m) } ) From 2c225659f3ae3089a597b9ab148050dd97c48cc9 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 29 Dec 2022 01:23:44 +0000 Subject: [PATCH 154/292] stubtest: Improve error message for `__all__`-related errors (#14362) This error is *only* emitted if `__all__` is included in the stub, so the 'if present' clause is unnecessary Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/stubtest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 0e4c36e684c1..bfd8e2b9c81a 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -267,7 +267,7 @@ def _verify_exported_names( object_path + ["__all__"], ( "names exported from the stub do not correspond to the names exported at runtime. " - "This is probably due to things being missing from the stub, or if present, an inaccurate `__all__` in the stub" + "This is probably due to things being missing from the stub or an inaccurate `__all__` in the stub" ), # Pass in MISSING instead of the stub and runtime objects, as the line numbers aren't very # relevant here, and it makes for a prettier error message From e51fb561854b5e04c4d5572deca282e1c7d68519 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 29 Dec 2022 10:13:54 +0000 Subject: [PATCH 155/292] A few miscellaneous micro-optimizations (#14366) These are part of the changes that collectively bring a 6% performance improvement. These are all pretty minor. --- mypy/checker.py | 10 ++++++---- mypy/checkexpr.py | 2 +- mypy/types.py | 8 ++++++-- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 7c5fcba1bb09..c265ac4905fb 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -378,10 +378,6 @@ def __init__( self.path = path self.msg = MessageBuilder(errors, modules) self.plugin = plugin - self.expr_checker = mypy.checkexpr.ExpressionChecker( - self, self.msg, self.plugin, per_line_checking_time_ns - ) - self.pattern_checker = PatternChecker(self, self.msg, self.plugin) self.tscope = Scope() self.scope = CheckerScope(tree) self.binder = ConditionalTypeBinder() @@ -419,6 +415,12 @@ def __init__( # example when type-checking class decorators. self.allow_abstract_call = False + # Child checker objects for specific AST node types + self.expr_checker = mypy.checkexpr.ExpressionChecker( + self, self.msg, self.plugin, per_line_checking_time_ns + ) + self.pattern_checker = PatternChecker(self, self.msg, self.plugin) + @property def type_context(self) -> list[Type | None]: return self.expr_checker.type_context diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 5993639be406..2e93a598fb7e 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -276,7 +276,7 @@ def __init__( self.msg = msg self.plugin = plugin self.per_line_checking_time_ns = per_line_checking_time_ns - self.collect_line_checking_stats = self.chk.options.line_checking_stats is not None + self.collect_line_checking_stats = chk.options.line_checking_stats is not None # Are we already visiting some expression? This is used to avoid double counting # time for nested expressions. self.in_expression = False diff --git a/mypy/types.py b/mypy/types.py index b48280466c3e..e25630e794db 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2623,13 +2623,17 @@ def __init__( # We must keep this false to avoid crashes during semantic analysis. 
# TODO: maybe switch this to True during type-checking pass? self.items = flatten_nested_unions(items, handle_type_alias_type=False) - self.can_be_true = any(item.can_be_true for item in items) - self.can_be_false = any(item.can_be_false for item in items) # is_evaluated should be set to false for type comments and string literals self.is_evaluated = is_evaluated # uses_pep604_syntax is True if Union uses OR syntax (X | Y) self.uses_pep604_syntax = uses_pep604_syntax + def can_be_true_default(self) -> bool: + return any(item.can_be_true for item in self.items) + + def can_be_false_default(self) -> bool: + return any(item.can_be_false for item in self.items) + def __hash__(self) -> int: return hash(frozenset(self.items)) From 0070071d461dd57e2dc9b8a215333212167e13c8 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 29 Dec 2022 13:40:54 +0000 Subject: [PATCH 156/292] [mypyc] Fixes to union simplification (#14364) Flatten nested unions before simplifying unions. Simplify item type unions in loops. This fixes a crash introduced in #14363. --- mypyc/ir/rtypes.py | 37 ++++++++++ mypyc/irbuild/builder.py | 13 +++- mypyc/irbuild/mapper.py | 13 +--- mypyc/test-data/irbuild-lists.test | 70 ++++++++++++++++++- .../test/{test_subtype.py => test_typeops.py} | 26 ++++++- 5 files changed, 141 insertions(+), 18 deletions(-) rename mypyc/test/{test_subtype.py => test_typeops.py} (64%) diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py index 7fe8a940e4c2..babfe0770f35 100644 --- a/mypyc/ir/rtypes.py +++ b/mypyc/ir/rtypes.py @@ -797,6 +797,30 @@ def __init__(self, items: list[RType]) -> None: self.items_set = frozenset(items) self._ctype = "PyObject *" + @staticmethod + def make_simplified_union(items: list[RType]) -> RType: + """Return a normalized union that covers the given items. + + Flatten nested unions and remove duplicate items. + + Overlapping items are *not* simplified. For example, + [object, str] will not be simplified. + """ + items = flatten_nested_unions(items) + assert items + + # Remove duplicate items using set + list to preserve item order + seen = set() + new_items = [] + for item in items: + if item not in seen: + new_items.append(item) + seen.add(item) + if len(new_items) > 1: + return RUnion(new_items) + else: + return new_items[0] + def accept(self, visitor: RTypeVisitor[T]) -> T: return visitor.visit_runion(self) @@ -823,6 +847,19 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RUnion: return RUnion(types) +def flatten_nested_unions(types: list[RType]) -> list[RType]: + if not any(isinstance(t, RUnion) for t in types): + return types # Fast path + + flat_items: list[RType] = [] + for t in types: + if isinstance(t, RUnion): + flat_items.extend(flatten_nested_unions(t.items)) + else: + flat_items.append(t) + return flat_items + + def optional_value_type(rtype: RType) -> RType | None: """If rtype is the union of none_rprimitive and another type X, return X. 
diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 6310c25c64fb..792697970785 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -53,6 +53,7 @@ Type, TypeOfAny, UninhabitedType, + UnionType, get_proper_type, ) from mypy.util import split_target @@ -85,6 +86,7 @@ RInstance, RTuple, RType, + RUnion, bitmap_rprimitive, c_int_rprimitive, c_pyssize_t_rprimitive, @@ -864,8 +866,15 @@ def extract_int(self, e: Expression) -> int | None: return None def get_sequence_type(self, expr: Expression) -> RType: - target_type = get_proper_type(self.types[expr]) - assert isinstance(target_type, Instance) + return self.get_sequence_type_from_type(self.types[expr]) + + def get_sequence_type_from_type(self, target_type: Type) -> RType: + target_type = get_proper_type(target_type) + if isinstance(target_type, UnionType): + return RUnion.make_simplified_union( + [self.get_sequence_type_from_type(item) for item in target_type.items] + ) + assert isinstance(target_type, Instance), target_type if target_type.type.fullname == "builtins.str": return str_rprimitive else: diff --git a/mypyc/irbuild/mapper.py b/mypyc/irbuild/mapper.py index a108766644ce..dddb35230fd5 100644 --- a/mypyc/irbuild/mapper.py +++ b/mypyc/irbuild/mapper.py @@ -116,18 +116,7 @@ def type_to_rtype(self, typ: Type | None) -> RType: elif isinstance(typ, NoneTyp): return none_rprimitive elif isinstance(typ, UnionType): - # Remove redundant items using set + list to preserve item order - seen = set() - items = [] - for item in typ.items: - rtype = self.type_to_rtype(item) - if rtype not in seen: - items.append(rtype) - seen.add(rtype) - if len(items) > 1: - return RUnion(items) - else: - return items[0] + return RUnion.make_simplified_union([self.type_to_rtype(item) for item in typ.items]) elif isinstance(typ, AnyType): return object_rprimitive elif isinstance(typ, TypeType): diff --git a/mypyc/test-data/irbuild-lists.test b/mypyc/test-data/irbuild-lists.test index b82217465fef..cb9687a2f942 100644 --- a/mypyc/test-data/irbuild-lists.test +++ b/mypyc/test-data/irbuild-lists.test @@ -430,14 +430,20 @@ L5: return 1 [case testSimplifyListUnion] -from typing import List, Union +from typing import List, Union, Optional -def f(a: Union[List[str], List[bytes], int]) -> int: +def narrow(a: Union[List[str], List[bytes], int]) -> int: if isinstance(a, list): return len(a) return a +def loop(a: Union[List[str], List[bytes]]) -> None: + for x in a: + pass +def nested_union(a: Union[List[str], List[Optional[str]]]) -> None: + for x in a: + pass [out] -def f(a): +def narrow(a): a :: union[list, int] r0 :: object r1 :: int32 @@ -465,3 +471,61 @@ L1: L2: r8 = unbox(int, a) return r8 +def loop(a): + a :: list + r0 :: short_int + r1 :: ptr + r2 :: native_int + r3 :: short_int + r4 :: bit + r5 :: object + r6, x :: union[str, bytes] + r7 :: short_int +L0: + r0 = 0 +L1: + r1 = get_element_ptr a ob_size :: PyVarObject + r2 = load_mem r1 :: native_int* + keep_alive a + r3 = r2 << 1 + r4 = r0 < r3 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = CPyList_GetItemUnsafe(a, r0) + r6 = cast(union[str, bytes], r5) + x = r6 +L3: + r7 = r0 + 2 + r0 = r7 + goto L1 +L4: + return 1 +def nested_union(a): + a :: list + r0 :: short_int + r1 :: ptr + r2 :: native_int + r3 :: short_int + r4 :: bit + r5 :: object + r6, x :: union[str, None] + r7 :: short_int +L0: + r0 = 0 +L1: + r1 = get_element_ptr a ob_size :: PyVarObject + r2 = load_mem r1 :: native_int* + keep_alive a + r3 = r2 << 1 + r4 = r0 < r3 :: signed + if r4 goto L2 else goto L4 
:: bool +L2: + r5 = CPyList_GetItemUnsafe(a, r0) + r6 = cast(union[str, None], r5) + x = r6 +L3: + r7 = r0 + 2 + r0 = r7 + goto L1 +L4: + return 1 diff --git a/mypyc/test/test_subtype.py b/mypyc/test/test_typeops.py similarity index 64% rename from mypyc/test/test_subtype.py rename to mypyc/test/test_typeops.py index 4a0d8737c852..f414edd1a2bb 100644 --- a/mypyc/test/test_subtype.py +++ b/mypyc/test/test_typeops.py @@ -1,16 +1,19 @@ -"""Test cases for is_subtype and is_runtime_subtype.""" +"""Test cases for various RType operations.""" from __future__ import annotations import unittest from mypyc.ir.rtypes import ( + RUnion, bit_rprimitive, bool_rprimitive, int32_rprimitive, int64_rprimitive, int_rprimitive, + object_rprimitive, short_int_rprimitive, + str_rprimitive, ) from mypyc.rt_subtype import is_runtime_subtype from mypyc.subtype import is_subtype @@ -50,3 +53,24 @@ def test_bit(self) -> None: def test_bool(self) -> None: assert not is_runtime_subtype(bool_rprimitive, bit_rprimitive) assert not is_runtime_subtype(bool_rprimitive, int_rprimitive) + + +class TestUnionSimplification(unittest.TestCase): + def test_simple_type_result(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive]) == int_rprimitive + + def test_remove_duplicate(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive, int_rprimitive]) == int_rprimitive + + def test_cannot_simplify(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, str_rprimitive, object_rprimitive] + ) == RUnion([int_rprimitive, str_rprimitive, object_rprimitive]) + + def test_nested(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, int_rprimitive])] + ) == RUnion([int_rprimitive, str_rprimitive]) + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, RUnion([int_rprimitive])])] + ) == RUnion([int_rprimitive, str_rprimitive]) From 47747f2f58aef4cac30c0cd9416bc2a521152de5 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 29 Dec 2022 13:41:35 +0000 Subject: [PATCH 157/292] Some semantic analyzer micro-optimizations (#14367) The biggest change is replacing some calls to bound methods with trait method calls, which are faster when compiled. Also remove an unused argument to TypeVarLikeQuery and make a few misc tweaks. (Various small optimizations, including these, together netted a 6% performance improvement in self check.) --- mypy/semanal.py | 6 ++--- mypy/type_visitor.py | 2 +- mypy/typeanal.py | 58 +++++++++++++++++++++++++++----------------- 3 files changed, 40 insertions(+), 26 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index eceb96ca63ee..51310e4f3e4d 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1915,7 +1915,7 @@ def get_all_bases_tvars( except TypeTranslationError: # This error will be caught later. continue - base_tvars = base.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvars.extend(base_tvars) return remove_dups(tvars) @@ -1933,7 +1933,7 @@ def get_and_bind_all_tvars(self, type_exprs: list[Expression]) -> list[TypeVarLi except TypeTranslationError: # This error will be caught later. continue - base_tvars = base.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvars.extend(base_tvars) tvars = remove_dups(tvars) # Variables are defined in order of textual appearance. 
tvar_defs = [] @@ -3294,7 +3294,7 @@ def analyze_alias( ) return None, [], set(), [] - found_type_vars = typ.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + found_type_vars = typ.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvar_defs: list[TypeVarLikeType] = [] namespace = self.qualified_name(name) with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index 823e74e7e283..c5324357117b 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -315,7 +315,7 @@ class TypeQuery(SyntheticTypeVisitor[T]): # TODO: check that we don't have existing violations of this rule. """ - def __init__(self, strategy: Callable[[Iterable[T]], T]) -> None: + def __init__(self, strategy: Callable[[list[T]], T]) -> None: self.strategy = strategy # Keep track of the type aliases already visited. This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 28f293613d50..0755b21854de 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -4,7 +4,6 @@ import itertools from contextlib import contextmanager -from itertools import chain from typing import Callable, Iterable, Iterator, List, Sequence, Tuple, TypeVar from typing_extensions import Final, Protocol @@ -203,8 +202,6 @@ def __init__( allow_type_any: bool = False, ) -> None: self.api = api - self.lookup_qualified = api.lookup_qualified - self.lookup_fqn_func = api.lookup_fully_qualified self.fail_func = api.fail self.note_func = api.note self.tvar_scope = tvar_scope @@ -244,6 +241,14 @@ def __init__( # Allow variables typed as Type[Any] and type (useful for base classes). self.allow_type_any = allow_type_any + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + return self.api.lookup_qualified(name, ctx, suppress_errors) + + def lookup_fully_qualified(self, name: str) -> SymbolTableNode: + return self.api.lookup_fully_qualified(name) + def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) if t.optional: @@ -1408,14 +1413,17 @@ def tvar_scope_frame(self) -> Iterator[None]: yield self.tvar_scope = old_scope + def find_type_var_likes(self, t: Type, include_callables: bool = True) -> TypeVarLikeList: + return t.accept( + TypeVarLikeQuery(self.api, self.tvar_scope, include_callables=include_callables) + ) + def infer_type_variables(self, type: CallableType) -> list[tuple[str, TypeVarLikeExpr]]: """Return list of unique type variables referred to in a callable.""" names: list[str] = [] tvars: list[TypeVarLikeExpr] = [] for arg in type.arg_types: - for name, tvar_expr in arg.accept( - TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope) - ): + for name, tvar_expr in self.find_type_var_likes(arg): if name not in names: names.append(name) tvars.append(tvar_expr) @@ -1423,12 +1431,13 @@ def infer_type_variables(self, type: CallableType) -> list[tuple[str, TypeVarLik # look inside Callable types. Type variables only appearing in # functions in the return type belong to those functions, not the # function we're currently analyzing. 
- for name, tvar_expr in type.ret_type.accept( - TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope, include_callables=False) - ): + for name, tvar_expr in self.find_type_var_likes(type.ret_type, include_callables=False): if name not in names: names.append(name) tvars.append(tvar_expr) + + if not names: + return [] # Fast path return list(zip(names, tvars)) def bind_function_type_variables( @@ -1546,7 +1555,7 @@ def named_type( line: int = -1, column: int = -1, ) -> Instance: - node = self.lookup_fqn_func(fully_qualified_name) + node = self.lookup_fully_qualified(fully_qualified_name) assert isinstance(node.node, TypeInfo) any_type = AnyType(TypeOfAny.special_form) if args is not None: @@ -1785,7 +1794,9 @@ def set_any_tvars( return TypeAliasType(node, [any_type] * len(node.alias_tvars), newline, newcolumn) -def remove_dups(tvars: Iterable[T]) -> list[T]: +def remove_dups(tvars: list[T]) -> list[T]: + if len(tvars) <= 1: + return tvars # Get unique elements in order of appearance all_tvars: set[T] = set() new_tvars: list[T] = [] @@ -1796,8 +1807,13 @@ def remove_dups(tvars: Iterable[T]) -> list[T]: return new_tvars -def flatten_tvars(ll: Iterable[list[T]]) -> list[T]: - return remove_dups(chain.from_iterable(ll)) +def flatten_tvars(lists: list[list[T]]) -> list[T]: + result: list[T] = [] + for lst in lists: + for item in lst: + if item not in result: + result.append(item) + return result class TypeVarLikeQuery(TypeQuery[TypeVarLikeList]): @@ -1805,17 +1821,15 @@ class TypeVarLikeQuery(TypeQuery[TypeVarLikeList]): def __init__( self, - lookup: Callable[[str, Context], SymbolTableNode | None], + api: SemanticAnalyzerCoreInterface, scope: TypeVarLikeScope, *, include_callables: bool = True, - include_bound_tvars: bool = False, ) -> None: - self.include_callables = include_callables - self.lookup = lookup - self.scope = scope - self.include_bound_tvars = include_bound_tvars super().__init__(flatten_tvars) + self.api = api + self.scope = scope + self.include_callables = include_callables # Only include type variables in type aliases args. This would be anyway # that case if we expand (as target variables would be overridden with args) # and it may cause infinite recursion on invalid (diverging) recursive aliases. @@ -1833,16 +1847,16 @@ def visit_unbound_type(self, t: UnboundType) -> TypeVarLikeList: if name.endswith("args"): if name.endswith(".args") or name.endswith(".kwargs"): base = ".".join(name.split(".")[:-1]) - n = self.lookup(base, t) + n = self.api.lookup_qualified(base, t) if n is not None and isinstance(n.node, ParamSpecExpr): node = n name = base if node is None: - node = self.lookup(name, t) + node = self.api.lookup_qualified(name, t) if ( node and isinstance(node.node, TypeVarLikeExpr) - and (self.include_bound_tvars or self.scope.get_binding(node) is None) + and self.scope.get_binding(node) is None ): assert isinstance(node.node, TypeVarLikeExpr) return [(name, node.node)] From 8884f7d38c4ee190068666b633e4433d27c8293e Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 29 Dec 2022 13:42:22 +0000 Subject: [PATCH 158/292] Micro-optimize flatten_nested_unions (#14368) Avoid constructing a new list if there is nothing to flatten (and the input is a list, which is often the case). 
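In other words, the change adds one cheap scan before doing any work. A rough,
illustrative sketch of the pattern (using the public names from mypy.types;
flatten_fast_path is just a placeholder name, and the exact change is in the
diff below):

    from typing import Sequence

    from mypy.types import Type, TypeAliasType, UnionType, flatten_nested_unions

    def flatten_fast_path(types: Sequence[Type]) -> list[Type]:
        # Hypothetical helper, for illustration only.
        # Reuse the input list when we already have one, instead of copying it.
        if isinstance(types, list):
            typelist = types
        else:
            typelist = list(types)
        # Fast path: most of the time no item is a union (or a type alias that
        # could expand to one), so the existing list can be returned as-is,
        # without allocating anything.
        if not any(isinstance(t, (TypeAliasType, UnionType)) for t in typelist):
            return typelist
        # Slow path: defer to the full flattening logic.
        return flatten_nested_unions(typelist)

Since most callers pass a plain list that contains no nested unions, the common
case now skips both the list copy and the flattening loop.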
--- mypy/types.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index e25630e794db..fa1502f48fa4 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3381,12 +3381,20 @@ def _flattened(types: Iterable[Type]) -> Iterable[Type]: def flatten_nested_unions( - types: Iterable[Type], handle_type_alias_type: bool = True + types: Sequence[Type], handle_type_alias_type: bool = True ) -> list[Type]: """Flatten nested unions in a type list.""" + if not isinstance(types, list): + typelist = list(types) + else: + typelist = cast("list[Type]", types) + + # Fast path: most of the time there is nothing to flatten + if not any(isinstance(t, (TypeAliasType, UnionType)) for t in typelist): # type: ignore[misc] + return typelist + flat_items: list[Type] = [] - # TODO: avoid duplicate types in unions (e.g. using hash) - for t in types: + for t in typelist: tp = get_proper_type(t) if handle_type_alias_type else t if isinstance(tp, ProperType) and isinstance(tp, UnionType): flat_items.extend( From 52172a3a0197e2407e666fda5b83161ea8e17183 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 29 Dec 2022 15:17:47 +0000 Subject: [PATCH 159/292] Micro-optimize get_proper_type(s) (#14369) These are used a lot, so it makes sense to tune them a bit. We now avoid allocations in a common case, when compiled. (Various small optimizations, including these, together netted a 6% performance improvement in self check.) --- mypy/checkexpr.py | 9 ++++----- mypy/types.py | 24 ++++++++++++++++++------ 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 2e93a598fb7e..5c6db86964ac 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2213,7 +2213,7 @@ def check_overload_call( # we don't want to introduce internal inconsistencies. unioned_result = ( make_simplified_union(list(returns), context.line, context.column), - self.combine_function_signatures(inferred_types), + self.combine_function_signatures(get_proper_types(inferred_types)), ) # Step 3: We try checking each branch one-by-one. @@ -2554,7 +2554,7 @@ def type_overrides_set( for expr in exprs: del self.type_overrides[expr] - def combine_function_signatures(self, types: Sequence[Type]) -> AnyType | CallableType: + def combine_function_signatures(self, types: list[ProperType]) -> AnyType | CallableType: """Accepts a list of function signatures and attempts to combine them together into a new CallableType consisting of the union of all of the given arguments and return types. @@ -2562,10 +2562,9 @@ def combine_function_signatures(self, types: Sequence[Type]) -> AnyType | Callab an ambiguity because of Any in arguments). """ assert types, "Trying to merge no callables" - types = get_proper_types(types) if not all(isinstance(c, CallableType) for c in types): return AnyType(TypeOfAny.special_form) - callables = cast(Sequence[CallableType], types) + callables = cast("list[CallableType]", types) if len(callables) == 1: return callables[0] @@ -3463,7 +3462,7 @@ def check_op( # we call 'combine_function_signature' instead of just unioning the inferred # callable types. 
results_final = make_simplified_union(all_results) - inferred_final = self.combine_function_signatures(all_inferred) + inferred_final = self.combine_function_signatures(get_proper_types(all_inferred)) return results_final, inferred_final else: return self.check_method_call_by_name( diff --git a/mypy/types.py b/mypy/types.py index fa1502f48fa4..354a740567dc 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2888,23 +2888,35 @@ def get_proper_type(typ: Type | None) -> ProperType | None: typ = typ.type_guard while isinstance(typ, TypeAliasType): typ = typ._expand_once() - assert isinstance(typ, ProperType), typ # TODO: store the name of original type alias on this type, so we can show it in errors. - return typ + return cast(ProperType, typ) @overload -def get_proper_types(it: Iterable[Type]) -> list[ProperType]: # type: ignore[misc] +def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[misc] ... @overload -def get_proper_types(it: Iterable[Type | None]) -> list[ProperType | None]: +def get_proper_types( + types: list[Type | None] | tuple[Type | None, ...] +) -> list[ProperType | None]: ... -def get_proper_types(it: Iterable[Type | None]) -> list[ProperType] | list[ProperType | None]: - return [get_proper_type(t) for t in it] +def get_proper_types( + types: list[Type] | list[Type | None] | tuple[Type | None, ...] +) -> list[ProperType] | list[ProperType | None]: + if isinstance(types, list): + typelist = types + # Optimize for the common case so that we don't need to allocate anything + if not any( + isinstance(t, (TypeAliasType, TypeGuardedType)) for t in typelist # type: ignore[misc] + ): + return cast("list[ProperType]", typelist) + return [get_proper_type(t) for t in typelist] + else: + return [get_proper_type(t) for t in types] # We split off the type visitor base classes to another module From 9183b28401bd2928d921a068bbbc7e6565e77649 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 29 Dec 2022 15:18:06 +0000 Subject: [PATCH 160/292] Speed up make_simplified_union (#14370) If there is only one non-union item, there's nothing interesting to do. This is pretty common, and it avoids a fairly expensive `_remove_redundant_union_items` call. (Various small optimizations, including this, together netted a 6% performance improvement in self check.) --- mypy/typeops.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/mypy/typeops.py b/mypy/typeops.py index baf5b8552eff..8c01fb118076 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -441,6 +441,7 @@ def make_simplified_union( * [int, int] -> int * [int, Any] -> Union[int, Any] (Any types are not simplified away!) * [Any, Any] -> Any + * [int, Union[bytes, str]] -> Union[int, bytes, str] Note: This must NOT be used during semantic analysis, since TypeInfos may not be fully initialized. 
@@ -455,10 +456,14 @@ def make_simplified_union( # Step 1: expand all nested unions items = flatten_nested_unions(items) - # Step 2: remove redundant unions + # Step 2: fast path for single item + if len(items) == 1: + return get_proper_type(items[0]) + + # Step 3: remove redundant unions simplified_set: Sequence[Type] = _remove_redundant_union_items(items, keep_erased) - # Step 3: If more than one literal exists in the union, try to simplify + # Step 4: If more than one literal exists in the union, try to simplify if ( contract_literals and sum(isinstance(get_proper_type(item), LiteralType) for item in simplified_set) > 1 @@ -471,7 +476,7 @@ def make_simplified_union( if nitems > 1 and ( nitems > 2 or not (type(items[0]) is NoneType or type(items[1]) is NoneType) ): - # Step 4: At last, we erase any (inconsistent) extra attributes on instances. + # Step 5: At last, we erase any (inconsistent) extra attributes on instances. # Initialize with None instead of an empty set as a micro-optimization. The set # is needed very rarely, so we try to avoid constructing it. From c8316545971833e1f6eafd09de6aa12d9a00278c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 31 Dec 2022 17:50:18 -0700 Subject: [PATCH 161/292] Sync typeshed (#14375) Source commit: https://github.com/python/typeshed/commit/46f0d918efc074a3df0d56f94f755e1371d899f0 Co-authored-by: mypybot <> Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/typeshed/stdlib/_winapi.pyi | 1 + mypy/typeshed/stdlib/ast.pyi | 16 +- mypy/typeshed/stdlib/asyncio/subprocess.pyi | 98 ++++++++++- mypy/typeshed/stdlib/asyncio/tasks.pyi | 2 +- mypy/typeshed/stdlib/builtins.pyi | 8 +- mypy/typeshed/stdlib/collections/__init__.pyi | 7 +- mypy/typeshed/stdlib/ctypes/__init__.pyi | 4 +- mypy/typeshed/stdlib/datetime.pyi | 33 +++- mypy/typeshed/stdlib/json/__init__.pyi | 6 +- mypy/typeshed/stdlib/json/encoder.pyi | 4 +- mypy/typeshed/stdlib/netrc.pyi | 6 +- mypy/typeshed/stdlib/shutil.pyi | 25 ++- mypy/typeshed/stdlib/subprocess.pyi | 164 +++++++++--------- mypy/typeshed/stdlib/types.pyi | 3 + mypy/typeshed/stdlib/unittest/mock.pyi | 8 +- mypy/typeshed/stdlib/zipfile.pyi | 28 ++- mypy/typeshed/stdlib/zipimport.pyi | 2 +- 17 files changed, 275 insertions(+), 140 deletions(-) diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index 3ccac7e6b7e6..4fbefc33abb1 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -184,6 +184,7 @@ if sys.platform == "win32": def PeekNamedPipe(__handle: int, __size: int = ...) -> tuple[int, int] | tuple[bytes, int, int]: ... if sys.version_info >= (3, 10): def LCMapStringEx(locale: str, flags: int, src: str) -> str: ... + def UnmapViewOfFile(__address: int) -> None: ... @overload def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index b2cff5b00264..9a5bf0a623fb 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -89,6 +89,7 @@ class NodeVisitor: def visit_Constant(self, node: Constant) -> Any: ... if sys.version_info >= (3, 8): def visit_NamedExpr(self, node: NamedExpr) -> Any: ... + def visit_TypeIgnore(self, node: TypeIgnore) -> Any: ... def visit_Attribute(self, node: Attribute) -> Any: ... def visit_Subscript(self, node: Subscript) -> Any: ... 
@@ -135,6 +136,19 @@ class NodeVisitor: def visit_keyword(self, node: keyword) -> Any: ... def visit_alias(self, node: alias) -> Any: ... def visit_withitem(self, node: withitem) -> Any: ... + if sys.version_info >= (3, 10): + def visit_Match(self, node: Match) -> Any: ... + def visit_MatchValue(self, node: MatchValue) -> Any: ... + def visit_MatchSequence(self, node: MatchSequence) -> Any: ... + def visit_MatchStar(self, node: MatchStar) -> Any: ... + def visit_MatchMapping(self, node: MatchMapping) -> Any: ... + def visit_MatchClass(self, node: MatchClass) -> Any: ... + def visit_MatchAs(self, node: MatchAs) -> Any: ... + def visit_MatchOr(self, node: MatchOr) -> Any: ... + + if sys.version_info >= (3, 11): + def visit_TryStar(self, node: TryStar) -> Any: ... + # visit methods for deprecated nodes def visit_ExtSlice(self, node: ExtSlice) -> Any: ... def visit_Index(self, node: Index) -> Any: ... @@ -261,7 +275,7 @@ else: def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: _T) -> _T: ... -def get_docstring(node: AST, clean: bool = ...) -> str | None: ... +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = ...) -> str | None: ... def increment_lineno(node: _T, n: int = ...) -> _T: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi index 7fb588396905..d483f57551b0 100644 --- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi @@ -2,7 +2,7 @@ import subprocess import sys from _typeshed import StrOrBytesPath from asyncio import events, protocols, streams, transports -from collections.abc import Callable +from collections.abc import Callable, Collection from typing import IO, Any from typing_extensions import Literal, TypeAlias @@ -40,7 +40,7 @@ class Process: def kill(self) -> None: ... async def communicate(self, input: bytes | bytearray | memoryview | None = ...) -> tuple[bytes, bytes]: ... -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 11): async def create_subprocess_shell( cmd: str | bytes, stdin: int | IO[Any] | None = ..., @@ -65,7 +65,13 @@ if sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + process_group: int | None = ..., + pipesize: int = ..., ) -> Process: ... async def create_subprocess_exec( program: _ExecArg, @@ -91,10 +97,80 @@ if sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + process_group: int | None = ..., + pipesize: int = ..., ) -> Process: ... 
-else: +elif sys.version_info >= (3, 10): + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = ..., + stdout: int | IO[Any] | None = ..., + stderr: int | IO[Any] | None = ..., + limit: int = ..., + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = ..., + shell: Literal[True] = ..., + bufsize: Literal[0] = ..., + encoding: None = ..., + errors: None = ..., + text: Literal[False, None] = ..., + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: subprocess._ENV | None = ..., + startupinfo: Any | None = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + pipesize: int = ..., + ) -> Process: ... + async def create_subprocess_exec( + program: _ExecArg, + *args: _ExecArg, + stdin: int | IO[Any] | None = ..., + stdout: int | IO[Any] | None = ..., + stderr: int | IO[Any] | None = ..., + limit: int = ..., + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = ..., + shell: Literal[True] = ..., + bufsize: Literal[0] = ..., + encoding: None = ..., + errors: None = ..., + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + text: bool | None = ..., + executable: StrOrBytesPath | None = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: subprocess._ENV | None = ..., + startupinfo: Any | None = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + pipesize: int = ..., + ) -> Process: ... + +else: # >= 3.9 async def create_subprocess_shell( cmd: str | bytes, stdin: int | IO[Any] | None = ..., @@ -120,7 +196,11 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., ) -> Process: ... async def create_subprocess_exec( program: _ExecArg, @@ -147,5 +227,9 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., ) -> Process: ... diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index 67581eb6a5ad..43dd020fa99d 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -270,7 +270,7 @@ else: # While this is true in general, here it's sort-of okay to have a covariant subclass, # since the only reason why `asyncio.Future` is invariant is the `set_result()` method, # and `asyncio.Task.set_result()` always raises. 
-class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] +class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: ignore[reportGeneralTypeIssues] if sys.version_info >= (3, 8): def __init__( self, diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index a47d774467dd..8fbef893ac57 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1,6 +1,6 @@ +import _ast import sys import types -from _ast import AST from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( AnyStr_co, @@ -1096,7 +1096,7 @@ class property: class _NotImplementedType(Any): # type: ignore[misc] # A little weird, but typing the __call__ as NotImplemented makes the error message # for NotImplemented() much better - __call__: NotImplemented # type: ignore[valid-type] + __call__: NotImplemented # type: ignore[valid-type] # pyright: ignore[reportGeneralTypeIssues] NotImplemented: _NotImplementedType @@ -1131,7 +1131,7 @@ if sys.version_info >= (3, 10): # TODO: `compile` has a more precise return type in reality; work on a way of expressing that? if sys.version_info >= (3, 8): def compile( - source: str | ReadableBuffer | AST, + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: int = ..., @@ -1143,7 +1143,7 @@ if sys.version_info >= (3, 8): else: def compile( - source: str | ReadableBuffer | AST, + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: int = ..., diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 37505c256d9c..2955aa3b3cd0 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -327,16 +327,17 @@ class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]): # The C implementations of the "views" classes # (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`, # but they are not exposed anywhere) +# pyright doesn't have a specific error code for subclassing error! @final -class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] +class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[_KT_co]: ... @final -class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] +class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... @final -class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] +class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[_VT_co]: ... 
class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 84e4ba07a02a..2e26a08f81f9 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -64,8 +64,8 @@ class _CDataMeta(type): # By default mypy complains about the following two methods, because strictly speaking cls # might not be a Type[_CT]. However this can never actually happen, because the only class that # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. - def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] - def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _CData(metaclass=_CDataMeta): _b_base: int diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index b1b3c17ee25b..43f5902c3c06 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -70,7 +70,14 @@ class date: @property def day(self) -> int: ... def ctime(self) -> str: ... - def strftime(self, __format: str) -> str: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... @@ -140,7 +147,14 @@ class time: def isoformat(self, timespec: str = ...) -> str: ... @classmethod def fromisoformat(cls: type[Self], __time_string: str) -> Self: ... - def strftime(self, __format: str) -> str: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def utcoffset(self) -> timedelta | None: ... def tzname(self) -> str | None: ... @@ -233,11 +247,16 @@ class datetime(date): def tzinfo(self) -> _TzInfo | None: ... @property def fold(self) -> int: ... - # The first parameter in `fromtimestamp` is actually positional-or-keyword, - # but it is named "timestamp" in the C implementation and "t" in the Python implementation, - # so it is only truly *safe* to pass it as a positional argument. - @classmethod - def fromtimestamp(cls: type[Self], __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + # On <3.12, the name of the first parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + @classmethod + def fromtimestamp(cls: type[Self], timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + else: + @classmethod + def fromtimestamp(cls: type[Self], __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... 
+ @classmethod def utcfromtimestamp(cls: type[Self], __t: float) -> Self: ... if sys.version_info >= (3, 8): diff --git a/mypy/typeshed/stdlib/json/__init__.pyi b/mypy/typeshed/stdlib/json/__init__.pyi index 64ab8a11a45d..73bb5e8b4c1a 100644 --- a/mypy/typeshed/stdlib/json/__init__.pyi +++ b/mypy/typeshed/stdlib/json/__init__.pyi @@ -1,6 +1,6 @@ -from _typeshed import SupportsRead +from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable -from typing import IO, Any +from typing import Any from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDecoder from .encoder import JSONEncoder as JSONEncoder @@ -23,7 +23,7 @@ def dumps( ) -> str: ... def dump( obj: Any, - fp: IO[str], + fp: SupportsWrite[str], *, skipkeys: bool = ..., ensure_ascii: bool = ..., diff --git a/mypy/typeshed/stdlib/json/encoder.pyi b/mypy/typeshed/stdlib/json/encoder.pyi index 60e82061946b..0444ae477a96 100644 --- a/mypy/typeshed/stdlib/json/encoder.pyi +++ b/mypy/typeshed/stdlib/json/encoder.pyi @@ -20,7 +20,7 @@ class JSONEncoder: check_circular: bool allow_nan: bool sort_keys: bool - indent: int + indent: int | str def __init__( self, *, @@ -29,7 +29,7 @@ class JSONEncoder: check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ..., - indent: int | None = ..., + indent: int | str | None = ..., separators: tuple[str, str] | None = ..., default: Callable[..., Any] | None = ..., ) -> None: ... diff --git a/mypy/typeshed/stdlib/netrc.pyi b/mypy/typeshed/stdlib/netrc.pyi index 803c78073348..217c0eb542d0 100644 --- a/mypy/typeshed/stdlib/netrc.pyi +++ b/mypy/typeshed/stdlib/netrc.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import StrOrBytesPath from typing_extensions import TypeAlias @@ -10,7 +11,10 @@ class NetrcParseError(Exception): def __init__(self, msg: str, filename: StrOrBytesPath | None = ..., lineno: int | None = ...) -> None: ... # (login, account, password) tuple -_NetrcTuple: TypeAlias = tuple[str, str | None, str | None] +if sys.version_info >= (3, 11): + _NetrcTuple: TypeAlias = tuple[str, str, str] +else: + _NetrcTuple: TypeAlias = tuple[str, str | None, str | None] class netrc: hosts: dict[str, _NetrcTuple] diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index 13c706de1cf4..568879d76003 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -2,7 +2,7 @@ import os import sys from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Sequence -from typing import Any, AnyStr, NamedTuple, TypeVar, overload +from typing import Any, AnyStr, NamedTuple, Protocol, TypeVar, overload from typing_extensions import TypeAlias __all__ = [ @@ -84,13 +84,22 @@ else: _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], Any, Any], object] -if sys.version_info >= (3, 11): - def rmtree( - path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ..., *, dir_fd: int | None = ... - ) -> None: ... - -else: - def rmtree(path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ...) -> None: ... +class _RmtreeType(Protocol): + avoids_symlink_attacks: bool + if sys.version_info >= (3, 11): + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool = ..., + onerror: _OnErrorCallback | None = ..., + *, + dir_fd: int | None = ..., + ) -> None: ... + + else: + def __call__(self, path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ...) 
-> None: ... + +rmtree: _RmtreeType _CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], object] diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index 25b988adc52d..450eb8cd24d1 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import Self, StrOrBytesPath -from collections.abc import Callable, Iterable, Mapping, Sequence +from collections.abc import Callable, Collection, Iterable, Mapping, Sequence from types import TracebackType from typing import IO, Any, AnyStr, Generic, TypeVar, overload from typing_extensions import Literal, TypeAlias @@ -97,7 +97,7 @@ class CompletedProcess(Generic[_T]): args: _CMD, returncode: int, stdout: _T | None = ..., # pyright: ignore[reportInvalidTypeVarUse] - stderr: _T | None = ..., # pyright: ignore[reportInvalidTypeVarUse] + stderr: _T | None = ..., ) -> None: ... def check_returncode(self) -> None: ... if sys.version_info >= (3, 9): @@ -123,7 +123,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -157,7 +157,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -191,7 +191,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -226,7 +226,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start capture_output: bool = ..., check: bool = ..., @@ -260,7 +260,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -294,7 +294,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -331,7 +331,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -364,7 +364,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -397,7 +397,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -431,7 +431,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start capture_output: bool = 
..., check: bool = ..., @@ -464,7 +464,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -497,7 +497,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -533,7 +533,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -565,7 +565,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -597,7 +597,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -630,7 +630,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start capture_output: bool = ..., check: bool = ..., @@ -662,7 +662,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -694,7 +694,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -728,7 +728,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -756,7 +756,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -784,7 +784,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -813,7 +813,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start capture_output: bool = ..., check: bool = ..., @@ -841,7 +841,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -869,7 +869,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -900,7 +900,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: 
Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -931,7 +931,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -961,7 +961,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -989,7 +989,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -1015,7 +1015,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1046,7 +1046,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1076,7 +1076,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1104,7 +1104,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1129,7 +1129,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1160,7 +1160,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1191,7 +1191,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1223,7 +1223,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., input: _TXT | None = ..., @@ -1254,7 +1254,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1285,7 +1285,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1319,7 +1319,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - 
pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1349,7 +1349,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1379,7 +1379,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1410,7 +1410,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., input: _TXT | None = ..., @@ -1440,7 +1440,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1470,7 +1470,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1503,7 +1503,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1532,7 +1532,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1561,7 +1561,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1591,7 +1591,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., input: _TXT | None = ..., @@ -1620,7 +1620,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1649,7 +1649,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1680,7 +1680,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1705,7 +1705,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1730,7 +1730,7 @@ else: creationflags: int = ..., restore_signals: 
bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1756,7 +1756,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., input: _TXT | None = ..., @@ -1781,7 +1781,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1806,7 +1806,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., input: _TXT | None = ..., @@ -1873,7 +1873,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -1904,7 +1904,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -1936,7 +1936,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -1967,7 +1967,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -1998,7 +1998,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2029,7 +2029,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2062,7 +2062,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -2092,7 +2092,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2123,7 +2123,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -2153,7 +2153,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -2183,7 +2183,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: 
bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2213,7 +2213,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2245,7 +2245,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -2274,7 +2274,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2304,7 +2304,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -2333,7 +2333,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -2362,7 +2362,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2391,7 +2391,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2421,7 +2421,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -2446,7 +2446,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2472,7 +2472,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -2497,7 +2497,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -2522,7 +2522,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2547,7 +2547,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., diff --git a/mypy/typeshed/stdlib/types.pyi 
b/mypy/typeshed/stdlib/types.pyi index a40b6280f47c..6928032f92b1 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -68,6 +68,9 @@ _V_co = TypeVar("_V_co", covariant=True) @final class _Cell: + if sys.version_info >= (3, 8): + def __init__(self, __contents: object = ...) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] cell_contents: Any diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index e4cedef1b425..47535499a9f2 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import Self -from collections.abc import Awaitable, Callable, Iterable, Mapping, Sequence +from collections.abc import Awaitable, Callable, Coroutine, Iterable, Mapping, Sequence from contextlib import _GeneratorContextManager from types import TracebackType from typing import Any, Generic, TypeVar, overload @@ -9,6 +9,8 @@ from typing_extensions import Literal, TypeAlias _T = TypeVar("_T") _TT = TypeVar("_TT", bound=type[Any]) _R = TypeVar("_R") +_F = TypeVar("_F", bound=Callable[..., Any]) +_AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]]) if sys.version_info >= (3, 8): __all__ = ( @@ -258,6 +260,10 @@ class _patch_dict: clear: Any def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... + if sys.version_info >= (3, 10): + def decorate_callable(self, f: _F) -> _F: ... + def decorate_async_callable(self, f: _AF) -> _AF: ... + def decorate_class(self, klass: Any) -> Any: ... def __enter__(self) -> Any: ... def __exit__(self, *args: object) -> Any: ... diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index 60134c915da7..e964cd6eda87 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -29,7 +29,6 @@ _DateTuple: TypeAlias = tuple[int, int, int, int, int, int] _ReadWriteMode: TypeAlias = Literal["r", "w"] _ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"] _ZipFileMode: TypeAlias = Literal["r", "w", "x", "a"] -_CompressionMode: TypeAlias = Literal[0, 8, 12, 14] class BadZipFile(Exception): ... @@ -101,7 +100,7 @@ class ZipFile: fp: IO[bytes] | None NameToInfo: dict[str, ZipInfo] start_dir: int # undocumented - compression: _CompressionMode # undocumented + compression: int # undocumented compresslevel: int | None # undocumented mode: _ZipFileMode # undocumented pwd: bytes | None # undocumented @@ -111,7 +110,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: Literal["r"] = ..., - compression: _CompressionMode = ..., + compression: int = ..., allowZip64: bool = ..., compresslevel: int | None = ..., *, @@ -123,7 +122,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: _ZipFileMode = ..., - compression: _CompressionMode = ..., + compression: int = ..., allowZip64: bool = ..., compresslevel: int | None = ..., *, @@ -135,7 +134,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: _ZipFileMode = ..., - compression: _CompressionMode = ..., + compression: int = ..., allowZip64: bool = ..., compresslevel: int | None = ..., *, @@ -146,7 +145,7 @@ class ZipFile: self, file: StrPath | IO[bytes], mode: _ZipFileMode = ..., - compression: _CompressionMode = ..., + compression: int = ..., allowZip64: bool = ..., compresslevel: int | None = ..., ) -> None: ... 
@@ -185,19 +184,14 @@ class ZipFile: class PyZipFile(ZipFile): def __init__( - self, - file: str | IO[bytes], - mode: _ZipFileMode = ..., - compression: _CompressionMode = ..., - allowZip64: bool = ..., - optimize: int = ..., + self, file: str | IO[bytes], mode: _ZipFileMode = ..., compression: int = ..., allowZip64: bool = ..., optimize: int = ... ) -> None: ... def writepy(self, pathname: str, basename: str = ..., filterfunc: Callable[[str], bool] | None = ...) -> None: ... class ZipInfo: filename: str date_time: _DateTuple - compress_type: _CompressionMode + compress_type: int comment: bytes extra: bytes create_system: int @@ -275,10 +269,10 @@ if sys.version_info >= (3, 8): def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: ... -ZIP_STORED: Literal[0] -ZIP_DEFLATED: Literal[8] +ZIP_STORED: int +ZIP_DEFLATED: int ZIP64_LIMIT: int ZIP_FILECOUNT_LIMIT: int ZIP_MAX_COMMENT: int -ZIP_BZIP2: Literal[12] -ZIP_LZMA: Literal[14] +ZIP_BZIP2: int +ZIP_LZMA: int diff --git a/mypy/typeshed/stdlib/zipimport.pyi b/mypy/typeshed/stdlib/zipimport.pyi index d3017f385c0c..dc2f1aee0752 100644 --- a/mypy/typeshed/stdlib/zipimport.pyi +++ b/mypy/typeshed/stdlib/zipimport.pyi @@ -20,7 +20,7 @@ class zipimporter: def find_loader(self, fullname: str, path: str | None = ...) -> tuple[zipimporter | None, list[str]]: ... # undocumented def find_module(self, fullname: str, path: str | None = ...) -> zipimporter | None: ... def get_code(self, fullname: str) -> CodeType: ... - def get_data(self, pathname: str) -> str: ... + def get_data(self, pathname: str) -> bytes: ... def get_filename(self, fullname: str) -> str: ... def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented def get_source(self, fullname: str) -> str | None: ... From 5f480f37ff3704ed581db944bfdf99c655b3dabe Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 2 Jan 2023 17:48:48 +0100 Subject: [PATCH 162/292] Fix Unpack imported from typing (#14378) Add missing check for `typing.Unpack` to fix running with `--python 3.11`. Co-authored-by: Jelle Zijlstra --- mypy/semanal.py | 2 +- test-data/unit/check-python311.test | 12 ++++++++++++ test-data/unit/lib-stub/typing.pyi | 2 ++ 3 files changed, 15 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 51310e4f3e4d..13350b58af87 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1881,7 +1881,7 @@ def analyze_unbound_tvar(self, t: Type) -> tuple[str, TypeVarLikeExpr] | None: # It's bound by our type variable scope return None return unbound.name, sym.node - if sym and sym.fullname == "typing_extensions.Unpack": + if sym and sym.fullname in ("typing.Unpack", "typing_extensions.Unpack"): inner_t = unbound.args[0] if not isinstance(inner_t, UnboundType): return None diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test index 9bf62b0c489d..7196f10f8863 100644 --- a/test-data/unit/check-python311.test +++ b/test-data/unit/check-python311.test @@ -51,3 +51,15 @@ try: except* (RuntimeError, ExceptionGroup) as e: # E: Exception type in except* cannot derive from BaseExceptionGroup reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, Any]]" [builtins fixtures/exception.pyi] + +[case testBasicTypeVarTupleGeneric] +from typing import Generic, TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +class Variadic(Generic[Unpack[Ts]]): + ... 
+ +variadic: Variadic[int, str] +reveal_type(variadic) # N: Revealed type is "__main__.Variadic[builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index f3850d3936b4..a306b70f74d7 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -27,6 +27,8 @@ NoReturn = 0 Never = 0 NewType = 0 ParamSpec = 0 +TypeVarTuple = 0 +Unpack = 0 Self = 0 TYPE_CHECKING = 0 From f104914b9d4f58796042667a8b3d06b5868bee34 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 2 Jan 2023 23:47:03 -0800 Subject: [PATCH 163/292] Improve performance for errors on class with many attributes (#14379) When checking manticore with `--check-untyped-defs`, this is a 4x total speedup from master, from ~320s to ~80s (uncompiled). I looked into this because of https://github.com/python/typeshed/pull/9443#issuecomment-1369120219 --- mypy/messages.py | 37 +++++++++++++++++++---------- mypy/semanal.py | 2 +- test-data/unit/check-kwargs.test | 6 ++--- test-data/unit/check-modules.test | 2 +- test-data/unit/semanal-modules.test | 4 ++-- 5 files changed, 32 insertions(+), 19 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index b8c04fe2b8e9..5d8bf79ec8a3 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -15,7 +15,7 @@ import re from contextlib import contextmanager from textwrap import dedent -from typing import Any, Callable, Iterable, Iterator, List, Sequence, cast +from typing import Any, Callable, Collection, Iterable, Iterator, List, Sequence, cast from typing_extensions import Final from mypy import errorcodes as codes, message_registry @@ -440,7 +440,7 @@ def has_no_attr( alternatives.discard(member) matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives] - matches.extend(best_matches(member, alternatives)[:3]) + matches.extend(best_matches(member, alternatives, n=3)) if member == "__aiter__" and matches == ["__iter__"]: matches = [] # Avoid misleading suggestion if matches: @@ -928,11 +928,11 @@ def unexpected_keyword_argument( matching_type_args.append(callee_arg_name) else: not_matching_type_args.append(callee_arg_name) - matches = best_matches(name, matching_type_args) + matches = best_matches(name, matching_type_args, n=3) if not matches: - matches = best_matches(name, not_matching_type_args) + matches = best_matches(name, not_matching_type_args, n=3) if matches: - msg += f"; did you mean {pretty_seq(matches[:3], 'or')}?" + msg += f"; did you mean {pretty_seq(matches, 'or')}?" 
self.fail(msg, context, code=codes.CALL_ARG) module = find_defining_module(self.modules, callee) if module: @@ -1695,10 +1695,10 @@ def typeddict_key_not_found( context, code=codes.TYPEDDICT_ITEM, ) - matches = best_matches(item_name, typ.items.keys()) + matches = best_matches(item_name, typ.items.keys(), n=3) if matches: self.note( - "Did you mean {}?".format(pretty_seq(matches[:3], "or")), + "Did you mean {}?".format(pretty_seq(matches, "or")), context, code=codes.TYPEDDICT_ITEM, ) @@ -2798,11 +2798,24 @@ def find_defining_module(modules: dict[str, MypyFile], typ: CallableType) -> Myp COMMON_MISTAKES: Final[dict[str, Sequence[str]]] = {"add": ("append", "extend")} -def best_matches(current: str, options: Iterable[str]) -> list[str]: - ratios = {v: difflib.SequenceMatcher(a=current, b=v).ratio() for v in options} - return sorted( - (o for o in options if ratios[o] > 0.75), reverse=True, key=lambda v: (ratios[v], v) - ) +def _real_quick_ratio(a: str, b: str) -> float: + # this is an upper bound on difflib.SequenceMatcher.ratio + # similar to difflib.SequenceMatcher.real_quick_ratio, but faster since we don't instantiate + al = len(a) + bl = len(b) + return 2.0 * min(al, bl) / (al + bl) + + +def best_matches(current: str, options: Collection[str], n: int) -> list[str]: + # narrow down options cheaply + assert current + options = [o for o in options if _real_quick_ratio(current, o) > 0.75] + if len(options) >= 50: + options = [o for o in options if abs(len(o) - len(current)) <= 1] + + ratios = {option: difflib.SequenceMatcher(a=current, b=option).ratio() for option in options} + options = [option for option, ratio in ratios.items() if ratio > 0.75] + return sorted(options, key=lambda v: (-ratios[v], v))[:n] def pretty_seq(args: Sequence[str], conjunction: str) -> str: diff --git a/mypy/semanal.py b/mypy/semanal.py index 13350b58af87..a58f4c9df310 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2531,7 +2531,7 @@ def report_missing_module_attribute( ) else: alternatives = set(module.names.keys()).difference({source_id}) - matches = best_matches(source_id, alternatives)[:3] + matches = best_matches(source_id, alternatives, n=3) if matches: suggestion = f"; maybe {pretty_seq(matches, 'or')}?" message += f"{suggestion}" diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index e59c295b58ac..ace28a18a5a8 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -87,7 +87,7 @@ class A: pass [case testMultipleKeywordsForMisspelling] def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here -f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other" or "atter"? +f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "other"? class A: pass class B: pass @@ -99,7 +99,7 @@ class B: pass [case testKeywordMisspellingInheritance] def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here -f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? +f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? class A: pass class B(A): pass class C: pass @@ -107,7 +107,7 @@ class C: pass [case testKeywordMisspellingFloatInt] def f(atter: float, btter: int) -> None: pass # N: "f" defined here x: int = 5 -f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? 
+f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? [case testKeywordMisspellingVarArgs] def f(other: 'A', *atter: 'A') -> None: pass # N: "f" defined here diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 26bd0f92ed9e..b11a959df4cc 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -2871,7 +2871,7 @@ aaaaa: int [case testModuleAttributeThreeSuggestions] import m -m.aaaaa # E: Module has no attribute "aaaaa"; maybe "aabaa", "aaaba", or "aaaab"? +m.aaaaa # E: Module has no attribute "aaaaa"; maybe "aaaab", "aaaba", or "aabaa"? [file m.py] aaaab: int diff --git a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test index 8ffd7d2488dc..bc381293161f 100644 --- a/test-data/unit/semanal-modules.test +++ b/test-data/unit/semanal-modules.test @@ -814,7 +814,7 @@ def somef_unction(): [file f.py] from m.x import somefunction [out] -tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "somef_unction" or "some_function"? +tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "some_function" or "somef_unction"? [case testImportMisspellingMultipleCandidatesTruncated] import f @@ -831,7 +831,7 @@ def somefun_ction(): [file f.py] from m.x import somefunction [out] -tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "somefun_ction", "somefu_nction", or "somef_unction"? +tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "some_function", "somef_unction", or "somefu_nction"? [case testFromImportAsInStub] from m import * From 692af6d8c87e03c3a16cb67958d84219df823eaa Mon Sep 17 00:00:00 2001 From: johnthagen Date: Tue, 3 Jan 2023 09:38:33 -0500 Subject: [PATCH 164/292] Use secure HTTPS link for Mypy badge (#14383) Now that the site is accessible via HTTPS - #9919 Fetch the badge using a secure link, especially since this will be used an example for others. Also, non-secure badges will not be shown in some contexts, such as DockerHub, so this makes the example both more secure and more generally useful. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 01d876ead3e4..9d9618e6bc12 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Mypy: Static Typing for Python [![Build Status](https://github.com/python/mypy/actions/workflows/test.yml/badge.svg)](https://github.com/python/mypy/actions) [![Documentation Status](https://readthedocs.org/projects/mypy/badge/?version=latest)](https://mypy.readthedocs.io/en/latest/?badge=latest) [![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/) +[![Checked with mypy](https://www.mypy-lang.org/static/mypy_badge.svg)](https://mypy-lang.org/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) From 24135782cd460507860c2b9256fb2ac6365e1b69 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 3 Jan 2023 14:45:46 -0800 Subject: [PATCH 165/292] More helpful error for missing self (#14386) Fixes #14385 --- mypy/semanal.py | 5 ++++- test-data/unit/check-classes.test | 4 ++-- test-data/unit/check-functions.test | 2 +- test-data/unit/check-super.test | 2 +- test-data/unit/fine-grained.test | 12 ++++++------ test-data/unit/semanal-errors.test | 6 +++--- 6 files changed, 17 insertions(+), 14 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index a58f4c9df310..aee355d7880d 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -947,7 +947,10 @@ def prepare_method_signature(self, func: FuncDef, info: TypeInfo, has_self_type: if func.name in ["__init_subclass__", "__class_getitem__"]: func.is_class = True if not func.arguments: - self.fail("Method must have at least one argument", func) + self.fail( + 'Method must have at least one argument. Did you forget the "self" argument?', + func, + ) elif isinstance(functype, CallableType): self_type = get_proper_type(functype.arg_types[0]) if isinstance(self_type, AnyType): diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 8784c73c5b17..b35b2f9e4e94 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -2901,7 +2901,7 @@ b.bad = 'a' # E: Incompatible types in assignment (expression has type "str", v from typing import Any class Test: - def __setattr__() -> None: ... # E: Method must have at least one argument # E: Invalid signature "Callable[[], None]" for "__setattr__" + def __setattr__() -> None: ... # E: Method must have at least one argument. Did you forget the "self" argument? # E: Invalid signature "Callable[[], None]" for "__setattr__" t = Test() t.crash = 'test' # E: "Test" has no attribute "crash" @@ -7120,7 +7120,7 @@ reveal_type(Foo().y) # N: Revealed type is "builtins.list[Any]" # flags: --check-untyped-defs class Foo: - def bad(): # E: Method must have at least one argument + def bad(): # E: Method must have at least one argument. Did you forget the "self" argument? 
self.x = 0 # E: Name "self" is not defined [case testTypeAfterAttributeAccessWithDisallowAnyExpr] diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index ae6424f743be..9afe9189caaa 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2708,7 +2708,7 @@ class A: @dec def e(self) -> int: pass @property - def g() -> int: pass # E: Method must have at least one argument + def g() -> int: pass # E: Method must have at least one argument. Did you forget the "self" argument? @property def h(self, *args, **kwargs) -> int: pass # OK [builtins fixtures/property.pyi] diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test index 6537f563a99c..b3379e505be7 100644 --- a/test-data/unit/check-super.test +++ b/test-data/unit/check-super.test @@ -365,7 +365,7 @@ class A: def f(self) -> None: pass class B(A): - def g() -> None: # E: Method must have at least one argument + def g() -> None: # E: Method must have at least one argument. Did you forget the "self" argument? super().f() # E: super() requires one or more positional arguments in enclosing function def h(self) -> None: def a() -> None: diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 1a318b52a082..5a7f21d48c20 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -1586,11 +1586,11 @@ class A: [file b.py.3] 2 [out] -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? [case testBaseClassDeleted] import m @@ -2007,11 +2007,11 @@ class A: class A: def foo(self) -> int: pass [out] -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? == [case testPreviousErrorInMethodSemanal2] diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 5697d473414e..fffd02c9c337 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -542,7 +542,7 @@ import typing class A: def f(): pass [out] -main:3: error: Method must have at least one argument +main:3: error: Method must have at least one argument. Did you forget the "self" argument? [case testInvalidBaseClass] import typing @@ -564,8 +564,8 @@ class A: def f() -> None: pass def g(): pass [out] -main:3: error: Method must have at least one argument -main:4: error: Method must have at least one argument +main:3: error: Method must have at least one argument. Did you forget the "self" argument? +main:4: error: Method must have at least one argument. Did you forget the "self" argument? 
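For illustration, the code shape that now gets the extended hint looks roughly like this (hypothetical snippet, not one of the test cases above):

```python
class Greeter:
    def greet() -> None:  # error: Method must have at least one argument. Did you forget the "self" argument?
        print("hello")
```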
[case testMultipleMethodDefinition] import typing From ca668055feceba63f2e441ec91ce76dac77eaf5d Mon Sep 17 00:00:00 2001 From: Richard Si Date: Thu, 5 Jan 2023 17:49:45 -0500 Subject: [PATCH 166/292] [mypyc] Compile away NewType type calls (#14398) For example, here the call to ID is simply a no-op at runtime, returning 1 unchanged. ID = NewType('ID', int) person = ID(1) Resolves https://github.com/mypyc/mypyc/issues/958 --- mypyc/irbuild/expression.py | 9 ++++++++- mypyc/test-data/irbuild-basic.test | 32 +++++++++++++++++++----------- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index b7d093cde7ee..b007435957b0 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -266,13 +266,20 @@ def transform_super_expr(builder: IRBuilder, o: SuperExpr) -> Value: def transform_call_expr(builder: IRBuilder, expr: CallExpr) -> Value: + callee = expr.callee if isinstance(expr.analyzed, CastExpr): return translate_cast_expr(builder, expr.analyzed) elif isinstance(expr.analyzed, AssertTypeExpr): # Compile to a no-op. return builder.accept(expr.analyzed.expr) + elif ( + isinstance(callee, (NameExpr, MemberExpr)) + and isinstance(callee.node, TypeInfo) + and callee.node.is_newtype + ): + # A call to a NewType type is a no-op at runtime. + return builder.accept(expr.args[0]) - callee = expr.callee if isinstance(callee, IndexExpr) and isinstance(callee.analyzed, TypeApplication): callee = callee.analyzed.expr # Unwrap type application diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index 16b085ad4927..f72720e59b18 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -2338,11 +2338,8 @@ def __top_level__(): r92, r93, r94, r95 :: ptr r96 :: dict r97 :: str - r98, r99 :: object - r100 :: dict - r101 :: str - r102 :: int32 - r103 :: bit + r98 :: int32 + r99 :: bit L0: r0 = builtins :: module r1 = load_address _Py_NoneStruct @@ -2454,13 +2451,9 @@ L2: set_mem r95, r91 :: builtins.object* keep_alive r88 r96 = __main__.globals :: static - r97 = 'Bar' - r98 = CPyDict_GetItem(r96, r97) - r99 = PyObject_CallFunctionObjArgs(r98, r88, 0) - r100 = __main__.globals :: static - r101 = 'y' - r102 = CPyDict_SetItem(r100, r101, r99) - r103 = r102 >= 0 :: signed + r97 = 'y' + r98 = CPyDict_SetItem(r96, r97, r88) + r99 = r98 >= 0 :: signed return 1 [case testChainedConditional] @@ -3584,3 +3577,18 @@ L0: r3 = 0.0 i__redef____redef__ = r3 return 1 + +[case testNewType] +from typing import NewType + +class A: pass + +N = NewType("N", A) + +def f(arg: A) -> N: + return N(arg) +[out] +def f(arg): + arg :: __main__.A +L0: + return arg From 25ccdfcd0ce788c0499c17e7320037eccaab49a9 Mon Sep 17 00:00:00 2001 From: Ilya Konstantinov Date: Thu, 5 Jan 2023 22:30:53 -0500 Subject: [PATCH 167/292] Document that report generation disables cache (#14402) #6076 made report generation disable cache but we didn't document it. --- docs/source/config_file.rst | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst index abaec31c6888..3b96e6bd7a5a 100644 --- a/docs/source/config_file.rst +++ b/docs/source/config_file.rst @@ -210,7 +210,7 @@ section of the command line docs. line. Mypy *will* recursively type check any submodules of the provided package. This flag is identical to :confval:`modules` apart from this behavior. - + This option may only be set in the global section (``[mypy]``). .. 
confval:: exclude @@ -901,6 +901,12 @@ Report generation If these options are set, mypy will generate a report in the specified format into the specified directory. +.. warning:: + + Generating reports disables incremental mode and can significantly slow down + your workflow. It is recommended to enable reporting only for specific runs + (e.g. in CI). + .. confval:: any_exprs_report :type: string From 98cc165a657a316accb93f1ed57fdc128b086d9f Mon Sep 17 00:00:00 2001 From: Christoph Tyralla Date: Fri, 6 Jan 2023 05:16:13 +0100 Subject: [PATCH 168/292] Fix inference for constrained type variables within unions (#14396) Fixes #3644 Handling of constrained type variables of function `filter_satisfiable` of module `constraints` was missing (as was indicated by a removed ToDo). --- mypy/constraints.py | 8 ++++++-- test-data/unit/check-unions.test | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index fe3c1a19ff18..0dd748ff85e1 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -420,8 +420,12 @@ def filter_satisfiable(option: list[Constraint] | None) -> list[Constraint] | No return option satisfiable = [] for c in option: - # TODO: add similar logic for TypeVar values (also in various other places)? - if mypy.subtypes.is_subtype(c.target, c.origin_type_var.upper_bound): + if isinstance(c.origin_type_var, TypeVarType) and c.origin_type_var.values: + if any( + mypy.subtypes.is_subtype(c.target, value) for value in c.origin_type_var.values + ): + satisfiable.append(c) + elif mypy.subtypes.is_subtype(c.target, c.origin_type_var.upper_bound): satisfiable.append(c) if not satisfiable: return None diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index a561c29e54f7..4c4fbc32ec3f 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -1171,6 +1171,25 @@ def foo( foo([1]) [builtins fixtures/list.pyi] +[case testGenericUnionMemberWithTypeVarConstraints] + +from typing import Generic, TypeVar, Union + +T = TypeVar('T', str, int) + +class C(Generic[T]): ... + +def f(s: Union[T, C[T]]) -> T: ... 
+ +ci: C[int] +cs: C[str] + +reveal_type(f(1)) # N: Revealed type is "builtins.int" +reveal_type(f('')) # N: Revealed type is "builtins.str" +reveal_type(f(ci)) # N: Revealed type is "builtins.int" +reveal_type(f(cs)) # N: Revealed type is "builtins.str" + + [case testNestedInstanceTypeAliasUnsimplifiedUnion] from typing import TypeVar, Union, Iterator, List, Any T = TypeVar("T") From 7efe8e5487804014ecf8cb9b6e08f7194f61f963 Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Mon, 9 Jan 2023 12:43:58 +1000 Subject: [PATCH 169/292] Fix `util.plural_s` for zero and negative (#14411) While this doesn't currently appear anywhere in usages, it would be incorrect to say `0 error found` or `-4 line earlier` --- mypy/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/util.py b/mypy/util.py index cced4db34fc9..2c225c7fe651 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -816,7 +816,7 @@ def time_spent_us(t0: int) -> int: def plural_s(s: int | Sized) -> str: count = s if isinstance(s, int) else len(s) - if count > 1: + if count != 1: return "s" else: return "" From e959565ae9c8215dc25a82ff394574acd2363b47 Mon Sep 17 00:00:00 2001 From: jhance Date: Mon, 9 Jan 2023 08:59:57 -0800 Subject: [PATCH 170/292] Begin unifying logic for constraint building (#14406) Implements support for unpacking varlength tuples from *args, but because it became apparent that several parts of constraints building were doing nearly the same thing for typevar tuples, we begin extracting out some of the logic for re-use. Some existing callsites still should be switched to the new helpers but it is defered to future PRs. --- mypy/checkexpr.py | 31 +++- mypy/constraints.py | 214 ++++++++++++++---------- mypy/expandtype.py | 113 +++++++------ mypy/subtypes.py | 13 +- mypy/typeanal.py | 14 ++ mypy/types.py | 10 +- mypy/typevartuples.py | 38 ++++- test-data/unit/check-typevar-tuple.test | 90 ++++++++-- test-data/unit/semanal-errors.test | 10 +- test-data/unit/semanal-types.test | 25 +++ 10 files changed, 387 insertions(+), 171 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 5c6db86964ac..e6634e124d30 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -164,6 +164,7 @@ ) from mypy.typestate import type_state from mypy.typevars import fill_typevars +from mypy.typevartuples import find_unpack_in_list from mypy.util import split_module_names from mypy.visitor import ExpressionVisitor @@ -2064,12 +2065,30 @@ def check_argument_types( actual_kinds = [arg_kinds[a] for a in actuals] if isinstance(orig_callee_arg_type, UnpackType): unpacked_type = get_proper_type(orig_callee_arg_type.type) - # Only case we know of thus far. 
- assert isinstance(unpacked_type, TupleType) - actual_types = [arg_types[a] for a in actuals] - actual_kinds = [arg_kinds[a] for a in actuals] - callee_arg_types = unpacked_type.items - callee_arg_kinds = [ARG_POS] * len(actuals) + if isinstance(unpacked_type, TupleType): + inner_unpack_index = find_unpack_in_list(unpacked_type.items) + if inner_unpack_index is None: + callee_arg_types = unpacked_type.items + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + inner_unpack = get_proper_type(unpacked_type.items[inner_unpack_index]) + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + # We assume heterogenous tuples are desugared earlier + assert isinstance(inner_unpacked_type, Instance) + assert inner_unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = ( + unpacked_type.items[:inner_unpack_index] + + [inner_unpacked_type.args[0]] + * (len(actuals) - len(unpacked_type.items) + 1) + + unpacked_type.items[inner_unpack_index + 1 :] + ) + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + assert isinstance(unpacked_type, Instance) + assert unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = [unpacked_type.args[0]] * len(actuals) + callee_arg_kinds = [ARG_POS] * len(actuals) else: callee_arg_types = [orig_callee_arg_type] * len(actuals) callee_arg_kinds = [callee.arg_kinds[i]] * len(actuals) diff --git a/mypy/constraints.py b/mypy/constraints.py index 0dd748ff85e1..63e1672eb162 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -52,7 +52,6 @@ from mypy.typevartuples import ( extract_unpack, find_unpack_in_list, - split_with_instance, split_with_mapped_and_template, split_with_prefix_and_suffix, ) @@ -566,7 +565,7 @@ def visit_type_var_tuple(self, template: TypeVarTupleType) -> list[Constraint]: raise NotImplementedError def visit_unpack_type(self, template: UnpackType) -> list[Constraint]: - raise NotImplementedError + raise RuntimeError("Mypy bug: unpack should be handled at a higher level.") def visit_parameters(self, template: Parameters) -> list[Constraint]: # constraining Any against C[P] turns into infer_against_any([P], Any) @@ -638,47 +637,22 @@ def visit_instance(self, template: Instance) -> list[Constraint]: tvars = mapped.type.defn.type_vars if instance.type.has_type_var_tuple_type: - mapped_prefix, mapped_middle, mapped_suffix = split_with_instance(mapped) - instance_prefix, instance_middle, instance_suffix = split_with_instance( - instance - ) - - # Add a constraint for the type var tuple, and then - # remove it for the case below. 
- instance_unpack = extract_unpack(instance_middle) - if instance_unpack is not None: - if isinstance(instance_unpack, TypeVarTupleType): - res.append( - Constraint( - instance_unpack, - SUBTYPE_OF, - TupleType(list(mapped_middle), instance_unpack.tuple_fallback), - ) - ) - elif ( - isinstance(instance_unpack, Instance) - and instance_unpack.type.fullname == "builtins.tuple" - ): - for item in mapped_middle: - res.extend( - infer_constraints( - instance_unpack.args[0], item, self.direction - ) - ) - elif isinstance(instance_unpack, TupleType): - if len(instance_unpack.items) == len(mapped_middle): - for instance_arg, item in zip( - instance_unpack.items, mapped_middle - ): - res.extend( - infer_constraints(instance_arg, item, self.direction) - ) - - mapped_args = mapped_prefix + mapped_suffix - instance_args = instance_prefix + instance_suffix - assert instance.type.type_var_tuple_prefix is not None assert instance.type.type_var_tuple_suffix is not None + assert mapped.type.type_var_tuple_prefix is not None + assert mapped.type.type_var_tuple_suffix is not None + + unpack_constraints, mapped_args, instance_args = build_constraints_for_unpack( + mapped.args, + mapped.type.type_var_tuple_prefix, + mapped.type.type_var_tuple_suffix, + instance.args, + instance.type.type_var_tuple_prefix, + instance.type.type_var_tuple_suffix, + self.direction, + ) + res.extend(unpack_constraints) + tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( tuple(tvars), instance.type.type_var_tuple_prefix, @@ -732,57 +706,22 @@ def visit_instance(self, template: Instance) -> list[Constraint]: mapped = map_instance_to_supertype(instance, template.type) tvars = template.type.defn.type_vars if template.type.has_type_var_tuple_type: - mapped_prefix, mapped_middle, mapped_suffix = split_with_instance(mapped) - template_prefix, template_middle, template_suffix = split_with_instance( - template - ) - split_result = split_with_mapped_and_template(mapped, template) - assert split_result is not None - ( - mapped_prefix, - mapped_middle, - mapped_suffix, - template_prefix, - template_middle, - template_suffix, - ) = split_result - - # Add a constraint for the type var tuple, and then - # remove it for the case below. 
- template_unpack = extract_unpack(template_middle) - if template_unpack is not None: - if isinstance(template_unpack, TypeVarTupleType): - res.append( - Constraint( - template_unpack, - SUPERTYPE_OF, - TupleType(list(mapped_middle), template_unpack.tuple_fallback), - ) - ) - elif ( - isinstance(template_unpack, Instance) - and template_unpack.type.fullname == "builtins.tuple" - ): - for item in mapped_middle: - res.extend( - infer_constraints( - template_unpack.args[0], item, self.direction - ) - ) - elif isinstance(template_unpack, TupleType): - if len(template_unpack.items) == len(mapped_middle): - for template_arg, item in zip( - template_unpack.items, mapped_middle - ): - res.extend( - infer_constraints(template_arg, item, self.direction) - ) - - mapped_args = mapped_prefix + mapped_suffix - template_args = template_prefix + template_suffix - + assert mapped.type.type_var_tuple_prefix is not None + assert mapped.type.type_var_tuple_suffix is not None assert template.type.type_var_tuple_prefix is not None assert template.type.type_var_tuple_suffix is not None + + unpack_constraints, mapped_args, template_args = build_constraints_for_unpack( + mapped.args, + mapped.type.type_var_tuple_prefix, + mapped.type.type_var_tuple_suffix, + template.args, + template.type.type_var_tuple_prefix, + template.type.type_var_tuple_suffix, + self.direction, + ) + res.extend(unpack_constraints) + tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( tuple(tvars), template.type.type_var_tuple_prefix, @@ -945,12 +884,28 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # We can't infer constraints from arguments if the template is Callable[..., T] # (with literal '...'). if not template.is_ellipsis_args: + if find_unpack_in_list(template.arg_types) is not None: + ( + unpack_constraints, + cactual_args_t, + template_args_t, + ) = find_and_build_constraints_for_unpack( + tuple(cactual.arg_types), tuple(template.arg_types), self.direction + ) + template_args = list(template_args_t) + cactual_args = list(cactual_args_t) + res.extend(unpack_constraints) + assert len(template_args) == len(cactual_args) + else: + template_args = template.arg_types + cactual_args = cactual.arg_types # The lengths should match, but don't crash (it will error elsewhere). - for t, a in zip(template.arg_types, cactual.arg_types): + for t, a in zip(template_args, cactual_args): # Negate direction due to function argument type contravariance. res.extend(infer_constraints(t, a, neg_op(self.direction))) else: # sometimes, it appears we try to get constraints between two paramspec callables? + # TODO: Direction # TODO: check the prefixes match prefix = param_spec.prefix @@ -1197,3 +1152,80 @@ def find_matching_overload_items( # it maintains backward compatibility. 
res = items[:] return res + + +def find_and_build_constraints_for_unpack( + mapped: tuple[Type, ...], template: tuple[Type, ...], direction: int +) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + mapped_prefix_len = find_unpack_in_list(mapped) + if mapped_prefix_len is not None: + mapped_suffix_len: int | None = len(mapped) - mapped_prefix_len - 1 + else: + mapped_suffix_len = None + + template_prefix_len = find_unpack_in_list(template) + assert template_prefix_len is not None + template_suffix_len = len(template) - template_prefix_len - 1 + + return build_constraints_for_unpack( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + direction, + ) + + +def build_constraints_for_unpack( + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, + direction: int, +) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + split_result = split_with_mapped_and_template( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + ) + assert split_result is not None + ( + mapped_prefix, + mapped_middle, + mapped_suffix, + template_prefix, + template_middle, + template_suffix, + ) = split_result + + template_unpack = extract_unpack(template_middle) + res = [] + + if template_unpack is not None: + if isinstance(template_unpack, TypeVarTupleType): + res.append( + Constraint( + template_unpack, + direction, + TupleType(list(mapped_middle), template_unpack.tuple_fallback), + ) + ) + elif ( + isinstance(template_unpack, Instance) + and template_unpack.type.fullname == "builtins.tuple" + ): + for item in mapped_middle: + res.extend(infer_constraints(template_unpack.args[0], item, direction)) + + elif isinstance(template_unpack, TupleType): + if len(template_unpack.items) == len(mapped_middle): + for template_arg, item in zip(template_unpack.items, mapped_middle): + res.extend(infer_constraints(template_arg, item, direction)) + return (res, mapped_prefix + mapped_suffix, template_prefix + template_suffix) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index ca562ede264f..1c3553fe5e53 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -292,59 +292,14 @@ def visit_callable_type(self, t: CallableType) -> Type: expanded_tuple = get_proper_type(var_arg.typ.type.accept(self)) # TODO: handle the case that expanded_tuple is a variable length tuple. assert isinstance(expanded_tuple, TupleType) - expanded_unpack_index = find_unpack_in_list(expanded_tuple.items) - # This is the case where we just have Unpack[Tuple[X1, X2, X3]] - # (for example if either the tuple had no unpacks, or the unpack in the - # tuple got fully expanded to something with fixed length) - if expanded_unpack_index is None: - arg_names = ( - t.arg_names[:star_index] - + [None] * len(expanded_tuple.items) - + t.arg_names[star_index + 1 :] - ) - arg_kinds = ( - t.arg_kinds[:star_index] - + [ARG_POS] * len(expanded_tuple.items) - + t.arg_kinds[star_index + 1 :] - ) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded_tuple.items - + self.expand_types(t.arg_types[star_index + 1 :]) - ) - else: - # If Unpack[Ts] simplest form still has an unpack or is a - # homogenous tuple, then only the prefix can be represented as - # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] - # as the star arg, for example. 
- prefix_len = expanded_unpack_index - arg_names = ( - t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] - ) - arg_kinds = ( - t.arg_kinds[:star_index] - + [ARG_POS] * prefix_len - + t.arg_kinds[star_index:] - ) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded_tuple.items[:prefix_len] - # Constructing the Unpack containing the tuple without the prefix. - + [ - UnpackType( - expanded_tuple.copy_modified( - items=expanded_tuple.items[prefix_len:] - ) - ) - ] - + self.expand_types(t.arg_types[star_index + 1 :]) - ) + expanded_items = expanded_tuple.items else: - expanded = self.expand_unpack(var_arg.typ) - # Handle other cases later. - assert isinstance(expanded, list) - assert len(expanded) == 1 and isinstance(expanded[0], UnpackType) + expanded_items_res = self.expand_unpack(var_arg.typ) + # TODO: can it be anything except a list? + assert isinstance(expanded_items_res, list) + expanded_items = expanded_items_res + """ # In this case we keep the arg as ARG_STAR. arg_names = t.arg_names arg_kinds = t.arg_kinds @@ -353,6 +308,62 @@ def visit_callable_type(self, t: CallableType) -> Type: + expanded + self.expand_types(t.arg_types[star_index + 1 :]) ) + """ + + expanded_unpack_index = find_unpack_in_list(expanded_items) + # This is the case where we just have Unpack[Tuple[X1, X2, X3]] + # (for example if either the tuple had no unpacks, or the unpack in the + # tuple got fully expanded to something with fixed length) + if expanded_unpack_index is None: + arg_names = ( + t.arg_names[:star_index] + + [None] * len(expanded_items) + + t.arg_names[star_index + 1 :] + ) + arg_kinds = ( + t.arg_kinds[:star_index] + + [ARG_POS] * len(expanded_items) + + t.arg_kinds[star_index + 1 :] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + else: + # If Unpack[Ts] simplest form still has an unpack or is a + # homogenous tuple, then only the prefix can be represented as + # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] + # as the star arg, for example. + expanded_unpack = get_proper_type(expanded_items[expanded_unpack_index]) + assert isinstance(expanded_unpack, UnpackType) + + # Extract the typevartuple so we can get a tuple fallback from it. + expanded_unpacked_tvt = get_proper_type(expanded_unpack.type) + assert isinstance(expanded_unpacked_tvt, TypeVarTupleType) + + prefix_len = expanded_unpack_index + arg_names = ( + t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] + ) + arg_kinds = ( + t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items[:prefix_len] + # Constructing the Unpack containing the tuple without the prefix. 
+ + [ + UnpackType( + TupleType( + expanded_items[prefix_len:], expanded_unpacked_tvt.tuple_fallback + ) + ) + if len(expanded_items) - prefix_len > 1 + else expanded_items[0] + ] + + self.expand_types(t.arg_types[star_index + 1 :]) + ) else: arg_types = self.expand_types(t.arg_types) arg_names = t.arg_names diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 61ba7af5147f..83cb22d48fab 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -482,7 +482,18 @@ def visit_instance(self, left: Instance) -> bool: t = erased nominal = True if right.type.has_type_var_tuple_type: - split_result = fully_split_with_mapped_and_template(left, right) + assert left.type.type_var_tuple_prefix is not None + assert left.type.type_var_tuple_suffix is not None + assert right.type.type_var_tuple_prefix is not None + assert right.type.type_var_tuple_suffix is not None + split_result = fully_split_with_mapped_and_template( + left.args, + left.type.type_var_tuple_prefix, + left.type.type_var_tuple_suffix, + right.args, + right.type.type_var_tuple_prefix, + right.type.type_var_tuple_suffix, + ) if split_result is None: return False diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 0755b21854de..df74344fb392 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -1274,6 +1274,7 @@ def analyze_callable_args( args: list[Type] = [] kinds: list[ArgKind] = [] names: list[str | None] = [] + found_unpack = False for arg in arglist.items: if isinstance(arg, CallableArgument): args.append(arg.typ) @@ -1294,6 +1295,19 @@ def analyze_callable_args( if arg.name is not None and kind.is_star(): self.fail(f"{arg.constructor} arguments should not have names", arg) return None + elif isinstance(arg, UnboundType): + kind = ARG_POS + # Potentially a unpack. + sym = self.lookup_qualified(arg.name, arg) + if sym is not None: + if sym.fullname == "typing_extensions.Unpack": + if found_unpack: + self.fail("Callables can only have a single unpack", arg) + found_unpack = True + kind = ARG_STAR + args.append(arg) + kinds.append(kind) + names.append(None) else: args.append(arg) kinds.append(ARG_POS) diff --git a/mypy/types.py b/mypy/types.py index 354a740567dc..7af83b6c11d3 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3568,10 +3568,16 @@ def store_argument_type( if isinstance(arg_type, ParamSpecType): pass elif isinstance(arg_type, UnpackType): - if isinstance(get_proper_type(arg_type.type), TupleType): + unpacked_type = get_proper_type(arg_type.type) + if isinstance(unpacked_type, TupleType): # Instead of using Tuple[Unpack[Tuple[...]]], just use # Tuple[...] 
- arg_type = arg_type.type + arg_type = unpacked_type + elif ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + arg_type = unpacked_type else: arg_type = TupleType( [arg_type], diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index 4b3b5cc2dca7..29b85dae72eb 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -46,7 +46,12 @@ def split_with_instance( def split_with_mapped_and_template( - mapped: Instance, template: Instance + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, ) -> tuple[ tuple[Type, ...], tuple[Type, ...], @@ -55,7 +60,14 @@ def split_with_mapped_and_template( tuple[Type, ...], tuple[Type, ...], ] | None: - split_result = fully_split_with_mapped_and_template(mapped, template) + split_result = fully_split_with_mapped_and_template( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + ) if split_result is None: return None @@ -83,7 +95,12 @@ def split_with_mapped_and_template( def fully_split_with_mapped_and_template( - mapped: Instance, template: Instance + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, ) -> tuple[ tuple[Type, ...], tuple[Type, ...], @@ -96,8 +113,19 @@ def fully_split_with_mapped_and_template( tuple[Type, ...], tuple[Type, ...], ] | None: - mapped_prefix, mapped_middle, mapped_suffix = split_with_instance(mapped) - template_prefix, template_middle, template_suffix = split_with_instance(template) + if mapped_prefix_len is not None: + assert mapped_suffix_len is not None + mapped_prefix, mapped_middle, mapped_suffix = split_with_prefix_and_suffix( + tuple(mapped), mapped_prefix_len, mapped_suffix_len + ) + else: + mapped_prefix = tuple() + mapped_suffix = tuple() + mapped_middle = mapped + + template_prefix, template_middle, template_suffix = split_with_prefix_and_suffix( + tuple(template), template_prefix_len, template_suffix_len + ) unpack_prefix = find_unpack_in_list(template_middle) if unpack_prefix is None: diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index d85990293aea..f61b53dcd2c0 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -178,7 +178,7 @@ Ts = TypeVarTuple("Ts") B = Ts # E: Type variable "__main__.Ts" is invalid as target for type alias [builtins fixtures/tuple.pyi] -[case testPep646ArrayExample] +[case testTypeVarTuplePep646ArrayExample] from typing import Generic, Tuple, TypeVar, Protocol, NewType from typing_extensions import TypeVarTuple, Unpack @@ -213,7 +213,7 @@ reveal_type(abs(x)) # N: Revealed type is "__main__.Array[__main__.Height, __ma reveal_type(x + x) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" [builtins fixtures/tuple.pyi] -[case testPep646ArrayExampleWithDType] +[case testTypeVarTuplePep646ArrayExampleWithDType] from typing import Generic, Tuple, TypeVar, Protocol, NewType from typing_extensions import TypeVarTuple, Unpack @@ -250,7 +250,7 @@ reveal_type(x + x) # N: Revealed type is "__main__.Array[builtins.float, __main [builtins fixtures/tuple.pyi] -[case testPep646ArrayExampleInfer] +[case testTypeVarTuplePep646ArrayExampleInfer] from typing import Generic, Tuple, TypeVar, NewType from typing_extensions import 
TypeVarTuple, Unpack @@ -265,7 +265,7 @@ class Array(Generic[Unpack[Shape]]): x: Array[float, Height, Width] = Array() [builtins fixtures/tuple.pyi] -[case testPep646TypeConcatenation] +[case testTypeVarTuplePep646TypeConcatenation] from typing import Generic, TypeVar, NewType from typing_extensions import TypeVarTuple, Unpack @@ -295,7 +295,7 @@ d = add_batch_channels(a) reveal_type(d) # N: Revealed type is "__main__.Array[__main__.Batch, __main__.Height, __main__.Width, __main__.Channels]" [builtins fixtures/tuple.pyi] -[case testPep646TypeVarConcatenation] +[case testTypeVarTuplePep646TypeVarConcatenation] from typing import Generic, TypeVar, NewType, Tuple from typing_extensions import TypeVarTuple, Unpack @@ -311,7 +311,7 @@ def prefix_tuple( z = prefix_tuple(x=0, y=(True, 'a')) reveal_type(z) # N: Revealed type is "Tuple[builtins.int, builtins.bool, builtins.str]" [builtins fixtures/tuple.pyi] -[case testPep646TypeVarTupleUnpacking] +[case testTypeVarTuplePep646TypeVarTupleUnpacking] from typing import Generic, TypeVar, NewType, Any, Tuple from typing_extensions import TypeVarTuple, Unpack @@ -367,7 +367,7 @@ reveal_type(bad2) # N: Revealed type is "def (x: Tuple[builtins.int, Unpack[bui [builtins fixtures/tuple.pyi] -[case testPep646TypeVarStarArgs] +[case testTypeVarTuplePep646TypeVarStarArgsBasic] from typing import Tuple from typing_extensions import TypeVarTuple, Unpack @@ -381,6 +381,13 @@ def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: reveal_type(args_to_tuple(1, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]" +[builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646TypeVarStarArgs] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + def with_prefix_suffix(*args: Unpack[Tuple[bool, str, Unpack[Ts], int]]) -> Tuple[bool, str, Unpack[Ts], int]: reveal_type(args) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" return args @@ -404,14 +411,69 @@ with_prefix_suffix(*bad_t) # E: Too few arguments for "with_prefix_suffix" def foo(*args: Unpack[Ts]) -> None: reveal_type(with_prefix_suffix(True, "bar", *args, 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" -def concrete(*args: Unpack[Tuple[int, str]]) -> None: + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeVarStarArgsFixedLengthTuple] +from typing import Tuple +from typing_extensions import Unpack + +def foo(*args: Unpack[Tuple[int, str]]) -> None: reveal_type(args) # N: Revealed type is "Tuple[builtins.int, builtins.str]" -concrete(0, "foo") -concrete(0, 1) # E: Argument 2 to "concrete" has incompatible type "int"; expected "Unpack[Tuple[int, str]]" -concrete("foo", "bar") # E: Argument 1 to "concrete" has incompatible type "str"; expected "Unpack[Tuple[int, str]]" -concrete(0, "foo", 1) # E: Invalid number of arguments -concrete(0) # E: Invalid number of arguments -concrete() # E: Invalid number of arguments +foo(0, "foo") +foo(0, 1) # E: Argument 2 to "foo" has incompatible type "int"; expected "Unpack[Tuple[int, str]]" +foo("foo", "bar") # E: Argument 1 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, str]]" +foo(0, "foo", 1) # E: Invalid number of arguments +foo(0) # E: Invalid number of arguments +foo() # E: Invalid number of arguments +foo(*(0, "foo")) + +# TODO: fix this case to do something sensible. 
+#def foo2(*args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None: +# reveal_type(args) + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeVarStarArgsVariableLengthTuple] +from typing import Tuple +from typing_extensions import Unpack + +def foo(*args: Unpack[Tuple[int, ...]]) -> None: + reveal_type(args) # N: Revealed type is "builtins.tuple[builtins.int, ...]" + +foo(0, 1, 2) +# TODO: this should say 'expected "int"' rather than the unpack +foo(0, 1, "bar") # E: Argument 3 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, ...]]" + + +def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None: + reveal_type(args) # N: Revealed type is "Tuple[builtins.str, Unpack[builtins.tuple[builtins.int, ...]], builtins.bool, builtins.bool]" + # TODO: generate an error + # reveal_type(args[1]) + +foo2("bar", 1, 2, 3, False, True) +foo2(0, 1, 2, 3, False, True) # E: Argument 1 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2("bar", "bar", 2, 3, False, True) # E: Argument 2 to "foo2" has incompatible type "str"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2("bar", 1, 2, 3, 4, True) # E: Argument 5 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2(*("bar", 1, 2, 3, False, True)) +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646Callable] +from typing import Tuple, Callable +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def call( + target: Callable[[ Unpack[Ts]], None], + args: Tuple[Unpack[Ts]], +) -> None: + pass + +def func(arg1: int, arg2: str) -> None: ... +call(target=func, args=(0, 'foo')) # Valid +#call(target=func, args=(True, 'foo', 0)) # Error +#call(target=func, args=(0, 0, 'foo')) # Error [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index fffd02c9c337..a4ed905dcb9f 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -1474,4 +1474,12 @@ y: Unpack[TVariadic] # E: TypeVarTuple "TVariadic" is unbound class Variadic(Generic[Unpack[TVariadic], Unpack[TVariadic2]]): # E: Can only use one type var tuple in a class def pass -[builtins fixtures/tuple.pyi] + +# TODO: this should generate an error +#def bad_args(*args: TVariadic): +# pass + +def bad_kwargs(**kwargs: Unpack[TVariadic]): # E: Unpack item in ** argument must be a TypedDict + pass + +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 77ef10a26b13..494d701b758a 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -1560,4 +1560,29 @@ MypyFile:1( AssignmentStmt:2( NameExpr(TV* [__main__.TV]) TypeVarTupleExpr:2())) + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleCallable] +from typing_extensions import TypeVarTuple, Unpack +from typing import Callable +Ts = TypeVarTuple("Ts") + +def foo(x: Callable[[Unpack[Ts]], None]) -> None: + pass +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [TypeVarTuple, Unpack]) + ImportFrom:2(typing, [Callable]) + AssignmentStmt:3( + NameExpr(Ts* [__main__.Ts]) + TypeVarTupleExpr:3()) + FuncDef:5( + foo + Args( + Var(x)) + def [Ts] (x: def (*Unpack[Ts`-1])) + Block:5( + PassStmt:6()))) + [builtins fixtures/tuple.pyi] From 4ec6ea519f02dc918ad1e55c1096a1009079bfac Mon Sep 17 00:00:00 2001 From: Richard Si Date: Mon, 9 Jan 2023 
12:06:07 -0500 Subject: [PATCH 171/292] [mypyc] Don't load forward ref targets while setting up non-ext __annotations__ (#14401) Take this example: ``` from typing import NamedTuple class VTableMethod(NamedTuple): cls: "ClassIR" class ClassIR: pass ``` In irbuild::classdef::add_non_ext_class_attr_ann(), mypyc tries to assign the ClassIR type object to VTableMethod's `__annotations__`. This causes a segfault as ClassIR won't be initialized and allocated until *after* the NamedTuple is set up. Fortunately, AssignmentStmt preserves the unanalyzed type (UnboundType). If `stmt.unanalyzed_type.orginal_str_expr` is not None, then we know we're dealing with a forward ref and should just load the string instead. Unfortunately, it seems difficult (or impossible?) to infer whether an annotation is a forward reference when the annotations future is enabled and the annotation isn't a string. Fixes https://github.com/mypyc/mypyc/issues/938 Dataclasses are still broken as strings in `__annotations__` cause dataclass's internals to lookup the module in sys.modules which fails (as the module hasn't been fully initialized and added to sys.modules yet!) --- mypyc/irbuild/classdef.py | 15 +++++++++++++-- mypyc/test-data/run-tuples.test | 10 ++++++---- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index b1f2ed1a1a65..d49c0e580c91 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -24,7 +24,7 @@ TypeInfo, is_class_var, ) -from mypy.types import ENUM_REMOVED_PROPS, Instance, get_proper_type +from mypy.types import ENUM_REMOVED_PROPS, Instance, UnboundType, get_proper_type from mypyc.ir.class_ir import ClassIR, NonExtClassInfo from mypyc.ir.func_ir import FuncDecl, FuncSignature from mypyc.ir.ops import ( @@ -556,6 +556,7 @@ def add_non_ext_class_attr_ann( get_type_info: Callable[[AssignmentStmt], TypeInfo | None] | None = None, ) -> None: """Add a class attribute to __annotations__ of a non-extension class.""" + # FIXME: try to better preserve the special forms and type parameters of generics. typ: Value | None = None if get_type_info is not None: type_info = get_type_info(stmt) @@ -565,7 +566,17 @@ def add_non_ext_class_attr_ann( if typ is None: # FIXME: if get_type_info is not provided, don't fall back to stmt.type? ann_type = get_proper_type(stmt.type) - if isinstance(ann_type, Instance): + if ( + isinstance(stmt.unanalyzed_type, UnboundType) + and stmt.unanalyzed_type.original_str_expr is not None + ): + # Annotation is a forward reference, so don't attempt to load the actual + # type and load the string instead. + # + # TODO: is it possible to determine whether a non-string annotation is + # actually a forward reference due to the __annotations__ future? 
+ typ = builder.load_str(stmt.unanalyzed_type.original_str_expr) + elif isinstance(ann_type, Instance): typ = load_type(builder, ann_type.type, stmt.line) else: typ = builder.add(LoadAddress(type_object_op.type, type_object_op.src, stmt.line)) diff --git a/mypyc/test-data/run-tuples.test b/mypyc/test-data/run-tuples.test index 26b039320844..f6c92b9c720f 100644 --- a/mypyc/test-data/run-tuples.test +++ b/mypyc/test-data/run-tuples.test @@ -99,8 +99,6 @@ assert f(Sub(3, 2)) == 3 [case testNamedTupleClassSyntax] from typing import Dict, List, NamedTuple, Optional, Tuple, Union -class ClassIR: pass - class FuncIR: pass StealsDescription = Union[bool, List[bool]] @@ -119,8 +117,12 @@ class Record(NamedTuple): ordering: Optional[List[int]] extra_int_constants: List[Tuple[int]] +# Make sure mypyc loads the annotation string for this forward reference. +# Ref: https://github.com/mypyc/mypyc/issues/938 +class ClassIR: pass + [file driver.py] -from typing import Optional +from typing import ForwardRef, Optional from native import ClassIR, FuncIR, Record assert Record.__annotations__ == { @@ -129,7 +131,7 @@ assert Record.__annotations__ == { 'is_borrowed': bool, 'hash': str, 'python_path': tuple, - 'type': ClassIR, + 'type': ForwardRef('ClassIR'), 'method': FuncIR, 'shadow_method': type, 'classes': dict, From e1117c3fc49c0836bf098b5c5dd8027c62b3186c Mon Sep 17 00:00:00 2001 From: Richard Si Date: Tue, 10 Jan 2023 05:53:43 -0500 Subject: [PATCH 172/292] [mypyc] Precompute set literals for "in" ops against / iteration over set literals (#14409) Towards https://github.com/mypyc/mypyc/issues/726. (There's a Python compatibility bug that needs to be fixed before the issue can be closed.) For example, the set literals here are now precomputed as frozensets at module initialization. ``` x in {1, 2.0, "3"} x not in {1, 2.0, "3"} for _ in {1, 2.0, "3"}: ... ``` Set literal items supported: - Anything supported by `irbuild.constant_fold.constant_fold_expr()` - String and integer literals - Final references to int/str values - Certain int and str unary/binary ops that evaluate to a constant value - `None`, `True`, and `False` - Float, byte, and complex literals - Tuple literals with only items listed above **Results** (using gcc-9 on 64-bit Ubuntu) Master @ 98cc165a657a316accb93f1ed57fdc128b086d9f running in_set .......... interpreted: 0.495790s (avg of 5 iterations; stdev 6.8%) compiled: 0.810029s (avg of 5 iterations; stdev 1.5%) compiled is 0.612x faster running set_literal_iteration ......................................................................................... interpreted: 0.020255s (avg of 45 iterations; stdev 2.5%) compiled: 0.016336s (avg of 45 iterations; stdev 1.8%) compiled is 1.240x faster This PR running in_set .......... interpreted: 0.502020s (avg of 5 iterations; stdev 1.1%) compiled: 0.390281s (avg of 5 iterations; stdev 6.2%) compiled is 1.286x faster running set_literal_iteration .............................................................................................. 
interpreted: 0.019917s (avg of 47 iterations; stdev 2.2%) compiled: 0.007134s (avg of 47 iterations; stdev 2.6%) compiled is 2.792x faster Benchmarks can be found here: mypyc/mypyc-benchmarks#32 --- mypyc/analysis/ircheck.py | 14 +++ mypyc/codegen/emitmodule.py | 5 +- mypyc/codegen/literals.py | 46 +++++--- mypyc/ir/ops.py | 9 +- mypyc/ir/pprint.py | 13 ++- mypyc/irbuild/builder.py | 7 +- mypyc/irbuild/expression.py | 73 ++++++++++++- mypyc/irbuild/for_helpers.py | 15 ++- mypyc/irbuild/util.py | 10 ++ mypyc/lib-rt/CPy.h | 3 +- mypyc/lib-rt/misc_ops.c | 21 +++- mypyc/primitives/set_ops.py | 2 +- mypyc/test-data/fixtures/ir.py | 2 + mypyc/test-data/irbuild-set.test | 182 +++++++++++++++++++++++++++++++ mypyc/test-data/run-sets.test | 33 ++++++ 15 files changed, 403 insertions(+), 32 deletions(-) diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py index b141784ef9ff..e96c640fa8a1 100644 --- a/mypyc/analysis/ircheck.py +++ b/mypyc/analysis/ircheck.py @@ -252,6 +252,15 @@ def check_tuple_items_valid_literals(self, op: LoadLiteral, t: tuple[object, ... if isinstance(x, tuple): self.check_tuple_items_valid_literals(op, x) + def check_frozenset_items_valid_literals(self, op: LoadLiteral, s: frozenset[object]) -> None: + for x in s: + if x is None or isinstance(x, (str, bytes, bool, int, float, complex)): + pass + elif isinstance(x, tuple): + self.check_tuple_items_valid_literals(op, x) + else: + self.fail(op, f"Invalid type for item of frozenset literal: {type(x)})") + def visit_load_literal(self, op: LoadLiteral) -> None: expected_type = None if op.value is None: @@ -271,6 +280,11 @@ def visit_load_literal(self, op: LoadLiteral) -> None: elif isinstance(op.value, tuple): expected_type = "builtins.tuple" self.check_tuple_items_valid_literals(op, op.value) + elif isinstance(op.value, frozenset): + # There's no frozenset_rprimitive type since it'd be pretty useless so we just pretend + # it's a set (when it's really a frozenset). + expected_type = "builtins.set" + self.check_frozenset_items_valid_literals(op, op.value) assert expected_type is not None, "Missed a case for LoadLiteral check" diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 5dacaf6acab6..9f65aa77c47f 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -669,6 +669,9 @@ def generate_literal_tables(self) -> None: # Descriptions of tuple literals init_tuple = c_array_initializer(literals.encoded_tuple_values()) self.declare_global("const int []", "CPyLit_Tuple", initializer=init_tuple) + # Descriptions of frozenset literals + init_frozenset = c_array_initializer(literals.encoded_frozenset_values()) + self.declare_global("const int []", "CPyLit_FrozenSet", initializer=init_frozenset) def generate_export_table(self, decl_emitter: Emitter, code_emitter: Emitter) -> None: """Generate the declaration and definition of the group's export struct. 
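For orientation, a rough sketch of the layout that the new `encoded_frozenset_values` / `_encode_collection_values` code further down in this patch produces for a single frozenset literal, e.g. the one behind `x in {1, "a"}` (the literal indices below are invented for the example):

```python
# Strings that end up in the CPyLit_FrozenSet C int array, assuming the items
# 1 and "a" were previously assigned literal indices 123 and 124.
encoded_frozensets = [
    "1",    # number of collections in the table
    "2",    # length of this frozenset
    "123",  # literal index of one item (1)
    "124",  # literal index of the other item ("a")
]
print(encoded_frozensets)
```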
@@ -839,7 +842,7 @@ def generate_globals_init(self, emitter: Emitter) -> None: for symbol, fixup in self.simple_inits: emitter.emit_line(f"{symbol} = {fixup};") - values = "CPyLit_Str, CPyLit_Bytes, CPyLit_Int, CPyLit_Float, CPyLit_Complex, CPyLit_Tuple" + values = "CPyLit_Str, CPyLit_Bytes, CPyLit_Int, CPyLit_Float, CPyLit_Complex, CPyLit_Tuple, CPyLit_FrozenSet" emitter.emit_lines( f"if (CPyStatics_Initialize(CPyStatics, {values}) < 0) {{", "return -1;", "}" ) diff --git a/mypyc/codegen/literals.py b/mypyc/codegen/literals.py index 29957d52101c..784a8ed27c4e 100644 --- a/mypyc/codegen/literals.py +++ b/mypyc/codegen/literals.py @@ -1,12 +1,13 @@ from __future__ import annotations -from typing import Any, Tuple, Union, cast +from typing import Any, Dict, FrozenSet, List, Tuple, Union, cast from typing_extensions import Final -# Supported Python literal types. All tuple items must have supported +# Supported Python literal types. All tuple / frozenset items must have supported # literal types as well, but we can't represent the type precisely. -LiteralValue = Union[str, bytes, int, bool, float, complex, Tuple[object, ...], None] - +LiteralValue = Union[ + str, bytes, int, bool, float, complex, Tuple[object, ...], FrozenSet[object], None +] # Some literals are singletons and handled specially (None, False and True) NUM_SINGLETONS: Final = 3 @@ -23,6 +24,7 @@ def __init__(self) -> None: self.float_literals: dict[float, int] = {} self.complex_literals: dict[complex, int] = {} self.tuple_literals: dict[tuple[object, ...], int] = {} + self.frozenset_literals: dict[frozenset[object], int] = {} def record_literal(self, value: LiteralValue) -> None: """Ensure that the literal value is available in generated code.""" @@ -55,6 +57,12 @@ def record_literal(self, value: LiteralValue) -> None: for item in value: self.record_literal(cast(Any, item)) tuple_literals[value] = len(tuple_literals) + elif isinstance(value, frozenset): + frozenset_literals = self.frozenset_literals + if value not in frozenset_literals: + for item in value: + self.record_literal(cast(Any, item)) + frozenset_literals[value] = len(frozenset_literals) else: assert False, "invalid literal: %r" % value @@ -86,6 +94,9 @@ def literal_index(self, value: LiteralValue) -> int: n += len(self.complex_literals) if isinstance(value, tuple): return n + self.tuple_literals[value] + n += len(self.tuple_literals) + if isinstance(value, frozenset): + return n + self.frozenset_literals[value] assert False, "invalid literal: %r" % value def num_literals(self) -> int: @@ -98,6 +109,7 @@ def num_literals(self) -> int: + len(self.float_literals) + len(self.complex_literals) + len(self.tuple_literals) + + len(self.frozenset_literals) ) # The following methods return the C encodings of literal values @@ -119,24 +131,32 @@ def encoded_complex_values(self) -> list[str]: return _encode_complex_values(self.complex_literals) def encoded_tuple_values(self) -> list[str]: - """Encode tuple values into a C array. + return self._encode_collection_values(self.tuple_literals) + + def encoded_frozenset_values(self) -> List[str]: + return self._encode_collection_values(self.frozenset_literals) + + def _encode_collection_values( + self, values: dict[tuple[object, ...], int] | dict[frozenset[object], int] + ) -> list[str]: + """Encode tuple/frozenset values into a C array. The format of the result is like this: - - + + ... - + ... 
""" - values = self.tuple_literals - value_by_index = {index: value for value, index in values.items()} + # FIXME: https://github.com/mypyc/mypyc/issues/965 + value_by_index = {index: value for value, index in cast(Dict[Any, int], values).items()} result = [] - num = len(values) - result.append(str(num)) - for i in range(num): + count = len(values) + result.append(str(count)) + for i in range(count): value = value_by_index[i] result.append(str(len(value))) for item in value: diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 1f79ba829d76..cc6f542c3e23 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -39,6 +39,7 @@ ) if TYPE_CHECKING: + from mypyc.codegen.literals import LiteralValue from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FuncDecl, FuncIR @@ -588,7 +589,7 @@ class LoadLiteral(RegisterOp): This is used to load a static PyObject * value corresponding to a literal of one of the supported types. - Tuple literals must contain only valid literal values as items. + Tuple / frozenset literals must contain only valid literal values as items. NOTE: You can use this to load boxed (Python) int objects. Use Integer to load unboxed, tagged integers or fixed-width, @@ -603,11 +604,7 @@ class LoadLiteral(RegisterOp): error_kind = ERR_NEVER is_borrowed = True - def __init__( - self, - value: None | str | bytes | bool | int | float | complex | tuple[object, ...], - rtype: RType, - ) -> None: + def __init__(self, value: LiteralValue, rtype: RType) -> None: self.value = value self.type = rtype diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index a9324a8608e4..cb9e4a2d2541 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -106,7 +106,18 @@ def visit_load_literal(self, op: LoadLiteral) -> str: # it explicit that this is a Python object. if isinstance(op.value, int): prefix = "object " - return self.format("%r = %s%s", op, prefix, repr(op.value)) + + rvalue = repr(op.value) + if isinstance(op.value, frozenset): + # We need to generate a string representation that won't vary + # run-to-run because sets are unordered, otherwise we may get + # spurious irbuild test failures. + # + # Sorting by the item's string representation is a bit of a + # hack, but it's stable and won't cause TypeErrors. + formatted_items = [repr(i) for i in sorted(op.value, key=str)] + rvalue = "frozenset({" + ", ".join(formatted_items) + "})" + return self.format("%r = %s%s", op, prefix, rvalue) def visit_get_attr(self, op: GetAttr) -> str: return self.format("%r = %s%r.%s", op, self.borrow_prefix(op), op.obj, op.attr) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 792697970785..c24207ac64ec 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -118,7 +118,7 @@ AssignmentTargetRegister, AssignmentTargetTuple, ) -from mypyc.irbuild.util import is_constant +from mypyc.irbuild.util import bytes_from_str, is_constant from mypyc.options import CompilerOptions from mypyc.primitives.dict_ops import dict_get_item_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op, next_op, py_setattr_op @@ -296,8 +296,7 @@ def load_bytes_from_str_literal(self, value: str) -> Value: are stored in BytesExpr.value, whose type is 'str' not 'bytes'. Thus we perform a special conversion here. 
""" - bytes_value = bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape") - return self.builder.load_bytes(bytes_value) + return self.builder.load_bytes(bytes_from_str(value)) def load_int(self, value: int) -> Value: return self.builder.load_int(value) @@ -886,7 +885,7 @@ def get_dict_base_type(self, expr: Expression) -> Instance: This is useful for dict subclasses like SymbolTable. """ target_type = get_proper_type(self.types[expr]) - assert isinstance(target_type, Instance) + assert isinstance(target_type, Instance), target_type dict_base = next(base for base in target_type.type.mro if base.fullname == "builtins.dict") return map_instance_to_supertype(target_type, dict_base) diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index b007435957b0..3f5b795a1436 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import Callable, cast +from typing import Callable, Sequence, cast from mypy.nodes import ( ARG_POS, @@ -55,6 +55,7 @@ ComparisonOp, Integer, LoadAddress, + LoadLiteral, RaiseStandardError, Register, TupleGet, @@ -63,12 +64,14 @@ ) from mypyc.ir.rtypes import ( RTuple, + bool_rprimitive, int_rprimitive, is_fixed_width_rtype, is_int_rprimitive, is_list_rprimitive, is_none_rprimitive, object_rprimitive, + set_rprimitive, ) from mypyc.irbuild.ast_helpers import is_borrow_friendly_expr, process_conditional from mypyc.irbuild.builder import IRBuilder, int_borrow_friendly_op @@ -86,6 +89,7 @@ tokenizer_printf_style, ) from mypyc.irbuild.specialize import apply_function_specialization, apply_method_specialization +from mypyc.irbuild.util import bytes_from_str from mypyc.primitives.bytes_ops import bytes_slice_op from mypyc.primitives.dict_ops import dict_get_item_op, dict_new_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op @@ -93,7 +97,7 @@ from mypyc.primitives.list_ops import list_append_op, list_extend_op, list_slice_op from mypyc.primitives.misc_ops import ellipsis_op, get_module_dict_op, new_slice_op, type_op from mypyc.primitives.registry import CFunctionDescription, builtin_names -from mypyc.primitives.set_ops import set_add_op, set_update_op +from mypyc.primitives.set_ops import set_add_op, set_in_op, set_update_op from mypyc.primitives.str_ops import str_slice_op from mypyc.primitives.tuple_ops import list_tuple_op, tuple_slice_op @@ -613,6 +617,54 @@ def transform_conditional_expr(builder: IRBuilder, expr: ConditionalExpr) -> Val return target +def set_literal_values(builder: IRBuilder, items: Sequence[Expression]) -> list[object] | None: + values: list[object] = [] + for item in items: + const_value = constant_fold_expr(builder, item) + if const_value is not None: + values.append(const_value) + continue + + if isinstance(item, RefExpr): + if item.fullname == "builtins.None": + values.append(None) + elif item.fullname == "builtins.True": + values.append(True) + elif item.fullname == "builtins.False": + values.append(False) + elif isinstance(item, (BytesExpr, FloatExpr, ComplexExpr)): + # constant_fold_expr() doesn't handle these (yet?) + v = bytes_from_str(item.value) if isinstance(item, BytesExpr) else item.value + values.append(v) + elif isinstance(item, TupleExpr): + tuple_values = set_literal_values(builder, item.items) + if tuple_values is not None: + values.append(tuple(tuple_values)) + + if len(values) != len(items): + # Bail if not all items can be converted into values. 
+ return None + return values + + +def precompute_set_literal(builder: IRBuilder, s: SetExpr) -> Value | None: + """Try to pre-compute a frozenset literal during module initialization. + + Return None if it's not possible. + + Supported items: + - Anything supported by irbuild.constant_fold.constant_fold_expr() + - None, True, and False + - Float, byte, and complex literals + - Tuple literals with only items listed above + """ + values = set_literal_values(builder, s.items) + if values is not None: + return builder.add(LoadLiteral(frozenset(values), set_rprimitive)) + + return None + + def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: # x in (...)/[...] # x not in (...)/[...] @@ -666,6 +718,23 @@ def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: else: return builder.true() + # x in {...} + # x not in {...} + if ( + first_op in ("in", "not in") + and len(e.operators) == 1 + and isinstance(e.operands[1], SetExpr) + ): + set_literal = precompute_set_literal(builder, e.operands[1]) + if set_literal is not None: + lhs = e.operands[0] + result = builder.builder.call_c( + set_in_op, [builder.accept(lhs), set_literal], e.line, bool_rprimitive + ) + if first_op == "not in": + return builder.unary_op(result, "not", e.line) + return result + if len(e.operators) == 1: # Special some common simple cases if first_op in ("is", "is not"): diff --git a/mypyc/irbuild/for_helpers.py b/mypyc/irbuild/for_helpers.py index fc67178af5de..61dbbe960eb2 100644 --- a/mypyc/irbuild/for_helpers.py +++ b/mypyc/irbuild/for_helpers.py @@ -17,6 +17,7 @@ Lvalue, MemberExpr, RefExpr, + SetExpr, TupleExpr, TypeAlias, ) @@ -469,12 +470,22 @@ def make_for_loop_generator( for_dict_gen.init(expr_reg, target_type) return for_dict_gen + iterable_expr_reg: Value | None = None + if isinstance(expr, SetExpr): + # Special case "for x in ". + from mypyc.irbuild.expression import precompute_set_literal + + set_literal = precompute_set_literal(builder, expr) + if set_literal is not None: + iterable_expr_reg = set_literal + # Default to a generic for loop. - expr_reg = builder.accept(expr) + if iterable_expr_reg is None: + iterable_expr_reg = builder.accept(expr) for_obj = ForIterable(builder, index, body_block, loop_exit, line, nested) item_type = builder._analyze_iterable_item_type(expr) item_rtype = builder.type_to_rtype(item_type) - for_obj.init(expr_reg, item_rtype) + for_obj.init(iterable_expr_reg, item_rtype) return for_obj diff --git a/mypyc/irbuild/util.py b/mypyc/irbuild/util.py index f50241b96cb3..ed01a59d1214 100644 --- a/mypyc/irbuild/util.py +++ b/mypyc/irbuild/util.py @@ -177,3 +177,13 @@ def is_constant(e: Expression) -> bool: ) ) ) + + +def bytes_from_str(value: str) -> bytes: + """Convert a string representing bytes into actual bytes. + + This is needed because the literal characters of BytesExpr (the + characters inside b'') are stored in BytesExpr.value, whose type is + 'str' not 'bytes'. 
+ """ + return bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape") diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 7ee914a037dc..befa397051ef 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -597,7 +597,8 @@ int CPyStatics_Initialize(PyObject **statics, const char * const *ints, const double *floats, const double *complex_numbers, - const int *tuples); + const int *tuples, + const int *frozensets); PyObject *CPy_Super(PyObject *builtins, PyObject *self); PyObject *CPy_CallReverseOpMethod(PyObject *left, PyObject *right, const char *op, _Py_Identifier *method); diff --git a/mypyc/lib-rt/misc_ops.c b/mypyc/lib-rt/misc_ops.c index 25f33c5f56c7..5fda78704bbc 100644 --- a/mypyc/lib-rt/misc_ops.c +++ b/mypyc/lib-rt/misc_ops.c @@ -535,7 +535,8 @@ int CPyStatics_Initialize(PyObject **statics, const char * const *ints, const double *floats, const double *complex_numbers, - const int *tuples) { + const int *tuples, + const int *frozensets) { PyObject **result = statics; // Start with some hard-coded values *result++ = Py_None; @@ -635,6 +636,24 @@ int CPyStatics_Initialize(PyObject **statics, *result++ = obj; } } + if (frozensets) { + int num = *frozensets++; + while (num-- > 0) { + int num_items = *frozensets++; + PyObject *obj = PyFrozenSet_New(NULL); + if (obj == NULL) { + return -1; + } + for (int i = 0; i < num_items; i++) { + PyObject *item = statics[*frozensets++]; + Py_INCREF(item); + if (PySet_Add(obj, item) == -1) { + return -1; + } + } + *result++ = obj; + } + } return 0; } diff --git a/mypyc/primitives/set_ops.py b/mypyc/primitives/set_ops.py index 801fdad34ea4..fcfb7847dc7d 100644 --- a/mypyc/primitives/set_ops.py +++ b/mypyc/primitives/set_ops.py @@ -54,7 +54,7 @@ ) # item in set -binary_op( +set_in_op = binary_op( name="in", arg_types=[object_rprimitive, set_rprimitive], return_type=c_int_rprimitive, diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index 0e437f4597ea..2f3c18e9c731 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -221,12 +221,14 @@ def clear(self) -> None: pass def pop(self) -> T: pass def update(self, x: Iterable[S]) -> None: pass def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... + def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... class frozenset(Generic[T]): def __init__(self, i: Optional[Iterable[T]] = None) -> None: pass def __iter__(self) -> Iterator[T]: pass def __len__(self) -> int: pass def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... + def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... 
class slice: pass diff --git a/mypyc/test-data/irbuild-set.test b/mypyc/test-data/irbuild-set.test index fec76751c915..c567422abac7 100644 --- a/mypyc/test-data/irbuild-set.test +++ b/mypyc/test-data/irbuild-set.test @@ -655,3 +655,185 @@ L0: r12 = PySet_Add(r0, r11) r13 = r12 >= 0 :: signed return r0 + +[case testOperatorInSetLiteral] +from typing_extensions import Final + +CONST: Final = "daylily" +non_const = 10 + +def precomputed(i: object) -> bool: + return i in {1, 2.0, 1 +2, 4j, "foo", b"bar", CONST, (None, (27,)), (), False} +def not_precomputed_non_final_name(i: int) -> bool: + return i in {non_const} +def not_precomputed_nested_set(i: int) -> bool: + return i in {frozenset({1}), 2} +[out] +def precomputed(i): + i :: object + r0 :: set + r1 :: int32 + r2 :: bit + r3 :: bool +L0: + r0 = frozenset({(), (None, (27,)), 1, 2.0, 3, 4j, False, b'bar', 'daylily', 'foo'}) + r1 = PySet_Contains(r0, i) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + return r3 +def not_precomputed_non_final_name(i): + i :: int + r0 :: dict + r1 :: str + r2 :: object + r3 :: int + r4 :: set + r5 :: object + r6 :: int32 + r7 :: bit + r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool +L0: + r0 = __main__.globals :: static + r1 = 'non_const' + r2 = CPyDict_GetItem(r0, r1) + r3 = unbox(int, r2) + r4 = PySet_New(0) + r5 = box(int, r3) + r6 = PySet_Add(r4, r5) + r7 = r6 >= 0 :: signed + r8 = box(int, i) + r9 = PySet_Contains(r4, r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + return r11 +def not_precomputed_nested_set(i): + i :: int + r0 :: set + r1 :: object + r2 :: int32 + r3 :: bit + r4 :: object + r5 :: set + r6 :: int32 + r7 :: bit + r8 :: object + r9 :: int32 + r10 :: bit + r11 :: object + r12 :: int32 + r13 :: bit + r14 :: bool +L0: + r0 = PySet_New(0) + r1 = object 1 + r2 = PySet_Add(r0, r1) + r3 = r2 >= 0 :: signed + r4 = PyFrozenSet_New(r0) + r5 = PySet_New(0) + r6 = PySet_Add(r5, r4) + r7 = r6 >= 0 :: signed + r8 = object 2 + r9 = PySet_Add(r5, r8) + r10 = r9 >= 0 :: signed + r11 = box(int, i) + r12 = PySet_Contains(r5, r11) + r13 = r12 >= 0 :: signed + r14 = truncate r12: int32 to builtins.bool + return r14 + +[case testForSetLiteral] +from typing_extensions import Final + +CONST: Final = 10 +non_const = 20 + +def precomputed() -> None: + for _ in {"None", "True", "False"}: + pass + +def precomputed2() -> None: + for _ in {None, False, 1, 2.0, "4", b"5", (6,), 7j, CONST, CONST + 1}: + pass + +def not_precomputed() -> None: + for not_optimized in {non_const}: + pass + +[out] +def precomputed(): + r0 :: set + r1, r2 :: object + r3 :: str + _ :: object + r4 :: bit +L0: + r0 = frozenset({'False', 'None', 'True'}) + r1 = PyObject_GetIter(r0) +L1: + r2 = PyIter_Next(r1) + if is_error(r2) goto L4 else goto L2 +L2: + r3 = cast(str, r2) + _ = r3 +L3: + goto L1 +L4: + r4 = CPy_NoErrOccured() +L5: + return 1 +def precomputed2(): + r0 :: set + r1, r2, _ :: object + r3 :: bit +L0: + r0 = frozenset({(6,), 1, 10, 11, 2.0, '4', 7j, False, None, b'5'}) + r1 = PyObject_GetIter(r0) +L1: + r2 = PyIter_Next(r1) + if is_error(r2) goto L4 else goto L2 +L2: + _ = r2 +L3: + goto L1 +L4: + r3 = CPy_NoErrOccured() +L5: + return 1 +def not_precomputed(): + r0 :: dict + r1 :: str + r2 :: object + r3 :: int + r4 :: set + r5 :: object + r6 :: int32 + r7 :: bit + r8, r9 :: object + r10, not_optimized :: int + r11 :: bit +L0: + r0 = __main__.globals :: static + r1 = 'non_const' + r2 = CPyDict_GetItem(r0, r1) + r3 = unbox(int, r2) + r4 = PySet_New(0) + r5 = box(int, r3) + r6 = 
PySet_Add(r4, r5) + r7 = r6 >= 0 :: signed + r8 = PyObject_GetIter(r4) +L1: + r9 = PyIter_Next(r8) + if is_error(r9) goto L4 else goto L2 +L2: + r10 = unbox(int, r9) + not_optimized = r10 +L3: + goto L1 +L4: + r11 = CPy_NoErrOccured() +L5: + return 1 + diff --git a/mypyc/test-data/run-sets.test b/mypyc/test-data/run-sets.test index 98ac92d569b7..56c946933fac 100644 --- a/mypyc/test-data/run-sets.test +++ b/mypyc/test-data/run-sets.test @@ -115,3 +115,36 @@ from native import update s = {1, 2, 3} update(s, [5, 4, 3]) assert s == {1, 2, 3, 4, 5} + +[case testPrecomputedFrozenSets] +from typing import Any +from typing_extensions import Final + +CONST: Final = "CONST" +non_const = "non_const" + +def main_set(item: Any) -> bool: + return item in {None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST} + +def main_negated_set(item: Any) -> bool: + return item not in {None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST} + +def non_final_name_set(item: Any) -> bool: + return item in {non_const} + +s = set() +for i in {None, False, 1, 2.0, "3", b"4", 5j, (6,), CONST}: + s.add(i) + +def test_in_set() -> None: + for item in (None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST): + assert main_set(item), f"{item!r} should be in set_main" + assert not main_negated_set(item), item + + assert non_final_name_set(non_const) + global non_const + non_const = "updated" + assert non_final_name_set("updated") + +def test_for_set() -> None: + assert not s ^ {None, False, 1, 2.0, "3", b"4", 5j, (6,), CONST}, s From 2475643f7e6708ec7da45a753003c13a2baaddce Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 10 Jan 2023 13:18:14 +0000 Subject: [PATCH 173/292] [mypyc] Support attributes that override properties (#14377) Code like this is now supported by mypyc: ``` class B: @property def x(self) -> int: return 0 class C(B): x: int = 0 # Attribute overrides a property ``` The implementation generates implicit getter/setter methods for attributes as needed and puts them in the vtable. I had to change both the irbuild "prepare" pass (where we generate declarations), irbuild main pass (where we generate implicit accessor IRs), and codegen (where implicit properties aren't visible externally to CPython). Also fix a minor mypy bug related to overriding properties and multiple inheritance that I encountered. This doesn't handle glue methods yet. --- mypy/checker.py | 9 +- mypyc/codegen/emitclass.py | 16 ++- mypyc/ir/class_ir.py | 12 +- mypyc/ir/func_ir.py | 9 +- mypyc/irbuild/classdef.py | 27 ++++- mypyc/irbuild/function.py | 33 +++++- mypyc/irbuild/prepare.py | 195 ++++++++++++++++++++++--------- mypyc/irbuild/vtable.py | 2 +- mypyc/sametype.py | 4 +- mypyc/test-data/run-classes.test | 182 +++++++++++++++++++++++++++++ mypyc/test-data/run-i64.test | 68 ++++++++++- 11 files changed, 487 insertions(+), 70 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index c265ac4905fb..065758cd2be9 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2498,8 +2498,13 @@ class C(B, A[int]): ... # this is unsafe because... 
ok = is_equivalent(first_type, second_type) if not ok: second_node = base2[name].node - if isinstance(second_node, Decorator) and second_node.func.is_property: - ok = is_subtype(first_type, cast(CallableType, second_type).ret_type) + if ( + isinstance(second_type, FunctionLike) + and second_node is not None + and is_property(second_node) + ): + second_type = get_property_type(second_type) + ok = is_subtype(first_type, second_type) else: if first_type is None: self.msg.cannot_determine_type_in_base(name, base1.name, ctx) diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 1e774bbd0185..72e16345a325 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -824,7 +824,10 @@ def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: ) ) - for prop in cl.properties: + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + # Generate getter declaration emitter.emit_line("static PyObject *") emitter.emit_line( @@ -834,7 +837,7 @@ def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: ) # Generate property setter declaration if a setter exists - if cl.properties[prop][1]: + if setter: emitter.emit_line("static int") emitter.emit_line( "{}({} *self, PyObject *value, void *closure);".format( @@ -854,11 +857,13 @@ def generate_getseters_table(cl: ClassIR, name: str, emitter: Emitter) -> None: ) ) emitter.emit_line(" NULL, NULL},") - for prop in cl.properties: + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + emitter.emit_line(f'{{"{prop}",') emitter.emit_line(f" (getter){getter_name(cl, prop, emitter.names)},") - setter = cl.properties[prop][1] if setter: emitter.emit_line(f" (setter){setter_name(cl, prop, emitter.names)},") emitter.emit_line("NULL, NULL},") @@ -878,6 +883,9 @@ def generate_getseters(cl: ClassIR, emitter: Emitter) -> None: if i < len(cl.attributes) - 1: emitter.emit_line("") for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + rtype = getter.sig.ret_type emitter.emit_line("") generate_readonly_getter(cl, prop, rtype, getter, emitter) diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index f0f772306e60..71d61c3f0efa 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -278,10 +278,18 @@ def name_prefix(self, names: NameGenerator) -> str: def struct_name(self, names: NameGenerator) -> str: return f"{exported_name(self.fullname)}Object" - def get_method_and_class(self, name: str) -> tuple[FuncIR, ClassIR] | None: + def get_method_and_class( + self, name: str, *, prefer_method: bool = False + ) -> tuple[FuncIR, ClassIR] | None: for ir in self.mro: if name in ir.methods: - return ir.methods[name], ir + func_ir = ir.methods[name] + if not prefer_method and func_ir.decl.implicit: + # This is an implicit accessor, so there is also an attribute definition + # which the caller prefers. This happens if an attribute overrides a + # property. 
+ return None + return func_ir, ir return None diff --git a/mypyc/ir/func_ir.py b/mypyc/ir/func_ir.py index 933230a853a8..dbb45fc7ec29 100644 --- a/mypyc/ir/func_ir.py +++ b/mypyc/ir/func_ir.py @@ -139,6 +139,7 @@ def __init__( kind: int = FUNC_NORMAL, is_prop_setter: bool = False, is_prop_getter: bool = False, + implicit: bool = False, ) -> None: self.name = name self.class_name = class_name @@ -155,7 +156,11 @@ def __init__( else: self.bound_sig = sig.bound_sig() - # this is optional because this will be set to the line number when the corresponding + # If True, not present in the mypy AST and must be synthesized during irbuild + # Currently only supported for property getters/setters + self.implicit = implicit + + # This is optional because this will be set to the line number when the corresponding # FuncIR is created self._line: int | None = None @@ -198,6 +203,7 @@ def serialize(self) -> JsonDict: "kind": self.kind, "is_prop_setter": self.is_prop_setter, "is_prop_getter": self.is_prop_getter, + "implicit": self.implicit, } # TODO: move this to FuncIR? @@ -219,6 +225,7 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncDecl: data["kind"], data["is_prop_setter"], data["is_prop_getter"], + data["implicit"], ) diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index d49c0e580c91..4e4263458b3e 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -25,6 +25,7 @@ is_class_var, ) from mypy.types import ENUM_REMOVED_PROPS, Instance, UnboundType, get_proper_type +from mypyc.common import PROPSET_PREFIX from mypyc.ir.class_ir import ClassIR, NonExtClassInfo from mypyc.ir.func_ir import FuncDecl, FuncSignature from mypyc.ir.ops import ( @@ -53,7 +54,13 @@ object_rprimitive, ) from mypyc.irbuild.builder import IRBuilder -from mypyc.irbuild.function import handle_ext_method, handle_non_ext_method, load_type +from mypyc.irbuild.function import ( + gen_property_getter_ir, + gen_property_setter_ir, + handle_ext_method, + handle_non_ext_method, + load_type, +) from mypyc.irbuild.util import dataclass_type, get_func_def, is_constant, is_dataclass_decorator from mypyc.primitives.dict_ops import dict_new_op, dict_set_item_op from mypyc.primitives.generic_ops import py_hasattr_op, py_setattr_op @@ -151,6 +158,24 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: else: builder.error("Unsupported statement in class body", stmt.line) + # Generate implicit property setters/getters + for name, decl in ir.method_decls.items(): + if decl.implicit and decl.is_prop_getter: + getter_ir = gen_property_getter_ir(builder, decl, cdef) + builder.functions.append(getter_ir) + ir.methods[getter_ir.decl.name] = getter_ir + + setter_ir = None + setter_name = PROPSET_PREFIX + name + if setter_name in ir.method_decls: + setter_ir = gen_property_setter_ir(builder, ir.method_decls[setter_name], cdef) + builder.functions.append(setter_ir) + ir.methods[setter_name] = setter_ir + + ir.properties[name] = (getter_ir, setter_ir) + # TODO: Generate glue method if needed? + # TODO: Do we need interpreted glue methods? Maybe not? 
+ cls_builder.finalize(ir) diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index 237088791bc9..5447f945db25 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -28,7 +28,7 @@ Var, ) from mypy.types import CallableType, get_proper_type -from mypyc.common import LAMBDA_NAME, SELF_NAME +from mypyc.common import LAMBDA_NAME, PROPSET_PREFIX, SELF_NAME from mypyc.ir.class_ir import ClassIR, NonExtClassInfo from mypyc.ir.func_ir import ( FUNC_CLASSMETHOD, @@ -1026,3 +1026,34 @@ def get_native_impl_ids(builder: IRBuilder, singledispatch_func: FuncDef) -> dic """ impls = builder.singledispatch_impls[singledispatch_func] return {impl: i for i, (typ, impl) in enumerate(impls) if not is_decorated(builder, impl)} + + +def gen_property_getter_ir(builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef) -> FuncIR: + """Generate an implicit trivial property getter for an attribute. + + These are used if an attribute can also be accessed as a property. + """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + value = builder.builder.get_attr(self_reg, name, func_decl.sig.ret_type, -1) + builder.add(Return(value)) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) + + +def gen_property_setter_ir(builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef) -> FuncIR: + """Generate an implicit trivial property setter for an attribute. + + These are used if an attribute can also be accessed as a property. + """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + value_reg = builder.add_argument("value", func_decl.sig.args[1].type) + assert name.startswith(PROPSET_PREFIX) + attr_name = name[len(PROPSET_PREFIX) :] + builder.add(SetAttr(self_reg, attr_name, value_reg, -1)) + builder.add(Return(builder.none())) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index 2399647374c0..eb8288b84818 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -50,7 +50,7 @@ RuntimeArg, ) from mypyc.ir.ops import DeserMaps -from mypyc.ir.rtypes import RInstance, dict_rprimitive, tuple_rprimitive +from mypyc.ir.rtypes import RInstance, RType, dict_rprimitive, none_rprimitive, tuple_rprimitive from mypyc.irbuild.mapper import Mapper from mypyc.irbuild.util import ( get_func_def, @@ -98,6 +98,12 @@ def build_type_map( else: prepare_non_ext_class_def(module.path, module.fullname, cdef, errors, mapper) + # Prepare implicit attribute accessors as needed if an attribute overrides a property. + for module, cdef in classes: + class_ir = mapper.type_to_ir[cdef.info] + if class_ir.is_ext_class: + prepare_implicit_property_accessors(cdef.info, class_ir, module.fullname, mapper) + # Collect all the functions also. We collect from the symbol table # so that we can easily pick out the right copy of a function that # is conditionally defined. @@ -168,6 +174,8 @@ def prepare_method_def( # works correctly. 
decl.name = PROPSET_PREFIX + decl.name decl.is_prop_setter = True + # Making the argument implicitly positional-only avoids unnecessary glue methods + decl.sig.args[1].pos_only = True ir.method_decls[PROPSET_PREFIX + node.name] = decl if node.func.is_property: @@ -212,6 +220,11 @@ def can_subclass_builtin(builtin_base: str) -> bool: def prepare_class_def( path: str, module_name: str, cdef: ClassDef, errors: Errors, mapper: Mapper ) -> None: + """Populate the interface-level information in a class IR. + + This includes attribute and method declarations, and the MRO, among other things, but + method bodies are generated in a later pass. + """ ir = mapper.type_to_ir[cdef.info] info = cdef.info @@ -223,8 +236,68 @@ def prepare_class_def( # Supports copy.copy and pickle (including subclasses) ir._serializable = True - # We sort the table for determinism here on Python 3.5 - for name, node in sorted(info.names.items()): + # Check for subclassing from builtin types + for cls in info.mro: + # Special case exceptions and dicts + # XXX: How do we handle *other* things?? + if cls.fullname == "builtins.BaseException": + ir.builtin_base = "PyBaseExceptionObject" + elif cls.fullname == "builtins.dict": + ir.builtin_base = "PyDictObject" + elif cls.fullname.startswith("builtins."): + if not can_subclass_builtin(cls.fullname): + # Note that if we try to subclass a C extension class that + # isn't in builtins, bad things will happen and we won't + # catch it here! But this should catch a lot of the most + # common pitfalls. + errors.error( + "Inheriting from most builtin types is unimplemented", path, cdef.line + ) + + # Set up the parent class + bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] + if not all(c.is_trait for c in bases[1:]): + errors.error("Non-trait bases must appear first in parent list", path, cdef.line) + ir.traits = [c for c in bases if c.is_trait] + + mro = [] # All mypyc base classes + base_mro = [] # Non-trait mypyc base classes + for cls in info.mro: + if cls not in mapper.type_to_ir: + if cls.fullname != "builtins.object": + ir.inherits_python = True + continue + base_ir = mapper.type_to_ir[cls] + if not base_ir.is_trait: + base_mro.append(base_ir) + mro.append(base_ir) + + if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: + ir.inherits_python = True + + base_idx = 1 if not ir.is_trait else 0 + if len(base_mro) > base_idx: + ir.base = base_mro[base_idx] + ir.mro = mro + ir.base_mro = base_mro + + prepare_methods_and_attributes(cdef, ir, path, module_name, errors, mapper) + prepare_init_method(cdef, ir, module_name, mapper) + + for base in bases: + if base.children is not None: + base.children.append(ir) + + if is_dataclass(cdef): + ir.is_augmented = True + + +def prepare_methods_and_attributes( + cdef: ClassDef, ir: ClassIR, path: str, module_name: str, errors: Errors, mapper: Mapper +) -> None: + """Populate attribute and method declarations.""" + info = cdef.info + for name, node in info.names.items(): # Currently all plugin generated methods are dummies and not included. if node.plugin_generated: continue @@ -249,27 +322,73 @@ def prepare_class_def( assert node.node.impl prepare_method_def(ir, module_name, cdef, mapper, node.node.impl) - # Check for subclassing from builtin types - for cls in info.mro: - # Special case exceptions and dicts - # XXX: How do we handle *other* things?? 
- if cls.fullname == "builtins.BaseException": - ir.builtin_base = "PyBaseExceptionObject" - elif cls.fullname == "builtins.dict": - ir.builtin_base = "PyDictObject" - elif cls.fullname.startswith("builtins."): - if not can_subclass_builtin(cls.fullname): - # Note that if we try to subclass a C extension class that - # isn't in builtins, bad things will happen and we won't - # catch it here! But this should catch a lot of the most - # common pitfalls. - errors.error( - "Inheriting from most builtin types is unimplemented", path, cdef.line - ) - if ir.builtin_base: ir.attributes.clear() + +def prepare_implicit_property_accessors( + info: TypeInfo, ir: ClassIR, module_name: str, mapper: Mapper +) -> None: + for base in ir.base_mro: + for name, attr_rtype in base.attributes.items(): + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) + + +def add_property_methods_for_attribute_if_needed( + info: TypeInfo, + ir: ClassIR, + attr_name: str, + attr_rtype: RType, + module_name: str, + mapper: Mapper, +) -> None: + """Add getter and/or setter for attribute if defined as property in a base class. + + Only add declarations. The body IR will be synthesized later during irbuild. + """ + for base in info.mro[1:]: + if base in mapper.type_to_ir: + n = base.names.get(attr_name) + if n is None: + continue + node = n.node + if isinstance(node, Decorator) and node.name not in ir.method_decls: + # Defined as a read-only property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + elif isinstance(node, OverloadedFuncDef) and is_valid_multipart_property_def(node): + # Defined as a read-write property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) + + +def add_getter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + sig = FuncSignature([self_arg], attr_rtype) + decl = FuncDecl(attr_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_getter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[attr_name] = decl + ir.property_types[attr_name] = attr_rtype # TODO: Needed?? 
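# Editorial note, not part of the patch: the getter/setter declarations added
# here are later given bodies by gen_property_getter_ir() and
# gen_property_setter_ir(). For the example from the commit message, where a
# subclass overrides a property "x" with a plain attribute, the synthesized
# accessors behave roughly like the hypothetical methods below (class and
# method names invented purely for illustration):
class _IllustrationOnly:
    x: int = 0

    def _implicit_x_getter(self) -> int:
        return self.x  # roughly a single GetAttr on the attribute

    def _implicit_x_setter(self, value: int) -> None:
        self.x = value  # roughly a single SetAttr on the attribute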
+ + +def add_setter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + value_arg = RuntimeArg("value", attr_rtype, pos_only=True) + sig = FuncSignature([self_arg, value_arg], none_rprimitive) + setter_name = PROPSET_PREFIX + attr_name + decl = FuncDecl(setter_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_setter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[setter_name] = decl + + +def prepare_init_method(cdef: ClassDef, ir: ClassIR, module_name: str, mapper: Mapper) -> None: # Set up a constructor decl init_node = cdef.info["__init__"].node if not ir.is_trait and not ir.builtin_base and isinstance(init_node, FuncDef): @@ -298,40 +417,6 @@ def prepare_class_def( ir.ctor = FuncDecl(cdef.name, None, module_name, ctor_sig) mapper.func_to_decl[cdef.info] = ir.ctor - # Set up the parent class - bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] - if not all(c.is_trait for c in bases[1:]): - errors.error("Non-trait bases must appear first in parent list", path, cdef.line) - ir.traits = [c for c in bases if c.is_trait] - - mro = [] - base_mro = [] - for cls in info.mro: - if cls not in mapper.type_to_ir: - if cls.fullname != "builtins.object": - ir.inherits_python = True - continue - base_ir = mapper.type_to_ir[cls] - if not base_ir.is_trait: - base_mro.append(base_ir) - mro.append(base_ir) - - if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: - ir.inherits_python = True - - base_idx = 1 if not ir.is_trait else 0 - if len(base_mro) > base_idx: - ir.base = base_mro[base_idx] - ir.mro = mro - ir.base_mro = base_mro - - for base in bases: - if base.children is not None: - base.children.append(ir) - - if is_dataclass(cdef): - ir.is_augmented = True - def prepare_non_ext_class_def( path: str, module_name: str, cdef: ClassDef, errors: Errors, mapper: Mapper diff --git a/mypyc/irbuild/vtable.py b/mypyc/irbuild/vtable.py index a02cd622cee1..13bc4d46e15d 100644 --- a/mypyc/irbuild/vtable.py +++ b/mypyc/irbuild/vtable.py @@ -62,7 +62,7 @@ def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries: # (This may not be the method in the entry, if it was overridden.) 
orig_parent_method = entry.cls.get_method(entry.name) assert orig_parent_method - method_cls = cls.get_method_and_class(entry.name) + method_cls = cls.get_method_and_class(entry.name, prefer_method=True) if method_cls: child_method, defining_cls = method_cls # TODO: emit a wrapper for __init__ that raises or something diff --git a/mypyc/sametype.py b/mypyc/sametype.py index 056ed683e5b8..1b811d4e9041 100644 --- a/mypyc/sametype.py +++ b/mypyc/sametype.py @@ -35,7 +35,9 @@ def is_same_method_signature(a: FuncSignature, b: FuncSignature) -> bool: len(a.args) == len(b.args) and is_same_type(a.ret_type, b.ret_type) and all( - is_same_type(t1.type, t2.type) and t1.name == t2.name and t1.optional == t2.optional + is_same_type(t1.type, t2.type) + and ((t1.pos_only and t2.pos_only) or t1.name == t2.name) + and t1.optional == t2.optional for t1, t2 in zip(a.args[1:], b.args[1:]) ) ) diff --git a/mypyc/test-data/run-classes.test b/mypyc/test-data/run-classes.test index 2af519dc7aa8..92ec3873bf38 100644 --- a/mypyc/test-data/run-classes.test +++ b/mypyc/test-data/run-classes.test @@ -1968,6 +1968,188 @@ import other_interpreted [out] +[case testAttributeOverridesProperty] +from typing import Any +from mypy_extensions import trait + +@trait +class T1: + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + +class C1(T1): + x: int = 1 + y: int = 4 + +def test_read_only_property_in_trait_implemented_as_attribute() -> None: + c = C1() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T1 = C1() + assert t.y == 4 + t = c + assert t.x == 5 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class B2: + @property + def x(self) -> int: + return 11 + + @property + def y(self) -> int: + return 25 + +class C2(B2): + x: int = 1 + y: int = 4 + +def test_read_only_property_in_class_implemented_as_attribute() -> None: + c = C2() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + b: B2 = C2() + assert b.y == 4 + b = c + assert b.x == 5 + assert b.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T3: + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + +class B3: + x: int = 1 + y: int = 4 + +class C3(B3, T3): + pass + +def test_read_only_property_implemented_as_attribute_indirectly() -> None: + c = C3() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T3 = C3() + assert t.y == 4 + t = c + assert t.x == 5 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T4: + @property + def x(self) -> int: ... + @x.setter + def x(self, v1: int) -> None: ... + + @property + def y(self) -> int: ... + @y.setter + def y(self, v2: int) -> None: ... + +class C4(T4): + x: int = 1 + y: int = 4 + +def test_read_write_property_implemented_as_attribute() -> None: + c = C4() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T4 = C4() + assert t.y == 4 + t.x = 5 + assert t.x == 5 + t.y = 6 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T5: + @property + def x(self) -> int: ... + @x.setter + def x(self, v1: int) -> None: ... + + @property + def y(self) -> int: ... + @y.setter + def y(self, v2: int) -> None: ... 
+ +class B5: + x: int = 1 + y: int = 4 + +class BB5(B5): + pass + +class C5(BB5, T5): + pass + +def test_read_write_property_indirectly_implemented_as_attribute() -> None: + c = C5() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T5 = C5() + assert t.y == 4 + t.x = 5 + assert t.x == 5 + t.y = 6 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + [case testSubclassAttributeAccess] from mypy_extensions import trait diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index 893b3f808f24..f0c664385d7b 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -1223,12 +1223,15 @@ def test_many_locals() -> None: assert a32 == 55 assert a33 == 20 -[case testI64GlueMethods] +[case testI64GlueMethodsAndInheritance] +from typing import Any from typing_extensions import Final MYPY = False if MYPY: - from mypy_extensions import i64 + from mypy_extensions import i64, trait + +from testutil import assertRaises MAGIC: Final = -113 @@ -1266,3 +1269,64 @@ def test_derived_switches_arg_to_have_default() -> None: b: Base = Derived() assert b.hoho(5) == 3 assert b.hoho(MAGIC) == MAGIC - 2 + +@trait +class T: + @property + def x(self) -> i64: ... + @property + def y(self) -> i64: ... + +class C(T): + x: i64 = 1 + y: i64 = 4 + +def test_read_only_property_in_trait_implemented_as_attribute() -> None: + c = C() + c.x = 5 + assert c.x == 5 + c.x = MAGIC + assert c.x == MAGIC + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T = C() + assert t.y == 4 + t = c + assert t.x == MAGIC + c.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = c + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class D(T): + xx: i64 + + @property + def x(self) -> i64: + return self.xx + + @property + def y(self) -> i64: + raise TypeError + +def test_read_only_property_in_trait_implemented_as_property() -> None: + d = D() + d.xx = 5 + assert d.x == 5 + d.xx = MAGIC + assert d.x == MAGIC + with assertRaises(TypeError): + d.y + t: T = d + assert t.x == MAGIC + d.xx = 6 + assert t.x == 6 + with assertRaises(TypeError): + t.y From e96527598a4271e0a8d59c4abb8bde36e927030a Mon Sep 17 00:00:00 2001 From: Tim Geypens Date: Tue, 10 Jan 2023 22:55:21 +0100 Subject: [PATCH 174/292] Don't read scripts without extensions as modules in namespace mode (#14335) The `FindModuleCache` currently matches files without an extension when `--namespace_packages` is enabled while [the docs](https://mypy.readthedocs.io/en/stable/running_mypy.html#mapping-file-paths-to-modules) don't mention that this should be the case. The "near-miss" logic collects candidates for modules, which could correctly include a _directory_ `foo/bar/baz` when looking for `foo/bar/baz`. However, the current logic also picks up a _file_ `foo/bar/baz`. This means that, if both a file `foo/bar/baz` and `foo/bar/baz.py` exist, the first one is actually picked, resulting in unexpected behaviour. The condition that checks `exists_case` on `foo/bar/baz` should also check that it is indeed a directory by checking that it is not a file. I'm open to different fixes of course, but this seemed like the most obvious and least impactful change to make. This PR modifies 2 tests: * add `test-data/packages/modulefinder/pkg1/a` to verify that `ModuleFinderSuite.test__no_namespace_packages__find_a_in_pkg1` is indeed working correctly even without the patch because it's not running in namespace mode. 
* add `test-data/packages/modulefinder/nsx-pkg3/nsx/c/c`, making `ModuleFinderSuite.test__find_nsx_c_c_in_pkg3` fail, which the patch fixes. To give one real-world example of this scenario: Bazel's Python rules construct a wrapper-script with the same name as the main Python-file without the extension for a `py_binary`-target. If some other Python rule depends on this `//foo/bar:baz` `py_binary`-target, it sees both `foo/bar/baz` and `foo/bar/baz.py` in the same directory, incorrectly picking up the wrapper-script instead of the module. Dependencies on a `py_binary` might be a bit of an edge-case, but Python execution of these targets does pick up the right file, so Mypy should probably as well. --- mypy/modulefinder.py | 6 +++++- test-data/packages/modulefinder/nsx-pkg3/nsx/c/c | 0 test-data/packages/modulefinder/pkg1/a | 0 3 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 test-data/packages/modulefinder/nsx-pkg3/nsx/c/c create mode 100644 test-data/packages/modulefinder/pkg1/a diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index b2abb4847705..265d76ed5bb6 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -507,7 +507,11 @@ def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: # In namespace mode, register a potential namespace package if self.options and self.options.namespace_packages: - if fscache.exists_case(base_path, dir_prefix) and not has_init: + if ( + not has_init + and fscache.exists_case(base_path, dir_prefix) + and not fscache.isfile_case(base_path, dir_prefix) + ): near_misses.append((base_path, dir_prefix)) # No package, look for module. diff --git a/test-data/packages/modulefinder/nsx-pkg3/nsx/c/c b/test-data/packages/modulefinder/nsx-pkg3/nsx/c/c new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/packages/modulefinder/pkg1/a b/test-data/packages/modulefinder/pkg1/a new file mode 100644 index 000000000000..e69de29bb2d1 From 13e6617448ed5619c8b3512c5eed302f6d9f0f5b Mon Sep 17 00:00:00 2001 From: Anders Kaseorg Date: Tue, 10 Jan 2023 19:25:42 -0500 Subject: [PATCH 175/292] =?UTF-8?q?Move=20truthy-function=20docs=20from=20?= =?UTF-8?q?=E2=80=9Coptional=20checks=E2=80=9D=20to=20=E2=80=9Cenabled=20b?= =?UTF-8?q?y=20default=E2=80=9D=20(#14380)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This error was enabled by default since its introduction (#13686); document it in the correct section. Signed-off-by: Anders Kaseorg --- docs/source/error_code_list.rst | 13 +++++++++++++ docs/source/error_code_list2.rst | 13 ------------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 264badc03107..1a39bf8feb6c 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -764,6 +764,19 @@ the provided type. assert_type([1], list[str]) # Error +Check that function isn't used in boolean context [truthy-function] +------------------------------------------------------------------- + +Functions will always evaluate to true in boolean contexts. + +.. code-block:: python + + def f(): + ... 
+ + if f: # Error: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] + pass + Report syntax errors [syntax] ----------------------------- diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index 0cf96ba9c2e7..85ab76da5cee 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -275,19 +275,6 @@ what the author might have intended. Of course it's possible that ``transform`` and so there is no error in practice. In such case, it is recommended to annotate ``items: Collection[int]``. -Check that function isn't used in boolean context [truthy-function] -------------------------------------------------------------------- - -Functions will always evaluate to true in boolean contexts. - -.. code-block:: python - - def f(): - ... - - if f: # Error: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] - pass - .. _ignore-without-code: Check that ``# type: ignore`` include an error code [ignore-without-code] From c4a5f5620763dcfe2b95847806a889d560e2aa51 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 11 Jan 2023 17:23:13 +0000 Subject: [PATCH 176/292] [mypyc] Faster bool and integer conversions (#14422) Speed up various conversions between bool and integer types. These cases are covered: * `bool(x)` for various types * `int(x)` for `bool` and integer arguments * Implicit coercion from `bool` to an integer type Support both regular `int` values and native int values. (Various small optimizations, including these, together netted a 6% performance improvement in self check.) --- mypyc/irbuild/ll_builder.py | 93 ++++++++++++++----- mypyc/irbuild/specialize.py | 28 ++++++ mypyc/test-data/irbuild-basic.test | 34 ++++--- mypyc/test-data/irbuild-bool.test | 144 +++++++++++++++++++++++++++++ mypyc/test-data/irbuild-i64.test | 26 ++++++ mypyc/test-data/irbuild-int.test | 19 ++++ mypyc/test-data/run-bools.test | 57 ++++++++++-- mypyc/test-data/run-i64.test | 41 ++++++++ mypyc/test-data/run-integers.test | 24 +++++ mypyc/test-data/run-strings.test | 5 + mypyc/test/test_irbuild.py | 1 + 11 files changed, 431 insertions(+), 41 deletions(-) create mode 100644 mypyc/test-data/irbuild-bool.test diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 88b35a95c08c..019f709f0acc 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -326,6 +326,17 @@ def coerce( ): # Equivalent types return src + elif ( + is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) + ) and is_int_rprimitive(target_type): + shifted = self.int_op( + bool_rprimitive, src, Integer(1, bool_rprimitive), IntOp.LEFT_SHIFT + ) + return self.add(Extend(shifted, int_rprimitive, signed=False)) + elif ( + is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) + ) and is_fixed_width_rtype(target_type): + return self.add(Extend(src, target_type, signed=False)) else: # To go from one unboxed type to another, we go through a boxed # in-between value, for simplicity. @@ -1642,35 +1653,38 @@ def shortcircuit_helper( self.activate_block(next_block) return target - def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: - if is_runtime_subtype(value.type, int_rprimitive): + def bool_value(self, value: Value) -> Value: + """Return bool(value). + + The result type can be bit_rprimitive or bool_rprimitive. 
+ """ + if is_bool_rprimitive(value.type) or is_bit_rprimitive(value.type): + result = value + elif is_runtime_subtype(value.type, int_rprimitive): zero = Integer(0, short_int_rprimitive) - self.compare_tagged_condition(value, zero, "!=", true, false, value.line) - return + result = self.comparison_op(value, zero, ComparisonOp.NEQ, value.line) elif is_fixed_width_rtype(value.type): zero = Integer(0, value.type) - value = self.add(ComparisonOp(value, zero, ComparisonOp.NEQ)) + result = self.add(ComparisonOp(value, zero, ComparisonOp.NEQ)) elif is_same_type(value.type, str_rprimitive): - value = self.call_c(str_check_if_true, [value], value.line) + result = self.call_c(str_check_if_true, [value], value.line) elif is_same_type(value.type, list_rprimitive) or is_same_type( value.type, dict_rprimitive ): length = self.builtin_len(value, value.line) zero = Integer(0) - value = self.binary_op(length, zero, "!=", value.line) + result = self.binary_op(length, zero, "!=", value.line) elif ( isinstance(value.type, RInstance) and value.type.class_ir.is_ext_class and value.type.class_ir.has_method("__bool__") ): # Directly call the __bool__ method on classes that have it. - value = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) + result = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) else: value_type = optional_value_type(value.type) if value_type is not None: - is_none = self.translate_is_op(value, self.none_object(), "is not", value.line) - branch = Branch(is_none, true, false, Branch.BOOL) - self.add(branch) + not_none = self.translate_is_op(value, self.none_object(), "is not", value.line) always_truthy = False if isinstance(value_type, RInstance): # check whether X.__bool__ is always just the default (object.__bool__) @@ -1679,18 +1693,55 @@ def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> ) and value_type.class_ir.is_method_final("__bool__"): always_truthy = True - if not always_truthy: - # Optional[X] where X may be falsey and requires a check - branch.true = BasicBlock() - self.activate_block(branch.true) + if always_truthy: + result = not_none + else: + # "X | None" where X may be falsey and requires a check + result = Register(bit_rprimitive) + true, false, end = BasicBlock(), BasicBlock(), BasicBlock() + branch = Branch(not_none, true, false, Branch.BOOL) + self.add(branch) + self.activate_block(true) # unbox_or_cast instead of coerce because we want the # type to change even if it is a subtype. 
remaining = self.unbox_or_cast(value, value_type, value.line) - self.add_bool_branch(remaining, true, false) - return - elif not is_bool_rprimitive(value.type) and not is_bit_rprimitive(value.type): - value = self.call_c(bool_op, [value], value.line) - self.add(Branch(value, true, false, Branch.BOOL)) + as_bool = self.bool_value(remaining) + self.add(Assign(result, as_bool)) + self.goto(end) + self.activate_block(false) + self.add(Assign(result, Integer(0, bit_rprimitive))) + self.goto(end) + self.activate_block(end) + else: + result = self.call_c(bool_op, [value], value.line) + return result + + def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: + opt_value_type = optional_value_type(value.type) + if opt_value_type is None: + bool_value = self.bool_value(value) + self.add(Branch(bool_value, true, false, Branch.BOOL)) + else: + # Special-case optional types + is_none = self.translate_is_op(value, self.none_object(), "is not", value.line) + branch = Branch(is_none, true, false, Branch.BOOL) + self.add(branch) + always_truthy = False + if isinstance(opt_value_type, RInstance): + # check whether X.__bool__ is always just the default (object.__bool__) + if not opt_value_type.class_ir.has_method( + "__bool__" + ) and opt_value_type.class_ir.is_method_final("__bool__"): + always_truthy = True + + if not always_truthy: + # Optional[X] where X may be falsey and requires a check + branch.true = BasicBlock() + self.activate_block(branch.true) + # unbox_or_cast instead of coerce because we want the + # type to change even if it is a subtype. + remaining = self.unbox_or_cast(value, opt_value_type, value.line) + self.add_bool_branch(remaining, true, false) def call_c( self, @@ -1795,7 +1846,7 @@ def matching_call_c( return target return None - def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int) -> Value: + def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int = -1) -> Value: """Generate a native integer binary op. 
Use native/C semantics, which sometimes differ from Python diff --git a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py index 5810482cd43d..06babd2f7e1a 100644 --- a/mypyc/irbuild/specialize.py +++ b/mypyc/irbuild/specialize.py @@ -51,7 +51,10 @@ dict_rprimitive, int32_rprimitive, int64_rprimitive, + int_rprimitive, + is_bool_rprimitive, is_dict_rprimitive, + is_fixed_width_rtype, is_int32_rprimitive, is_int64_rprimitive, is_int_rprimitive, @@ -688,3 +691,28 @@ def translate_i32(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value val = builder.accept(arg) return builder.coerce(val, int32_rprimitive, expr.line) return None + + +@specialize_function("builtins.int") +def translate_int(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if ( + is_bool_rprimitive(arg_type) + or is_int_rprimitive(arg_type) + or is_fixed_width_rtype(arg_type) + ): + src = builder.accept(arg) + return builder.coerce(src, int_rprimitive, expr.line) + return None + + +@specialize_function("builtins.bool") +def translate_bool(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + src = builder.accept(arg) + return builder.builder.bool_value(src) diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index f72720e59b18..a06977d037b2 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -1108,7 +1108,9 @@ L0: return 1 [case testCallableTypes] -from typing import Callable +from typing import Callable, Any +from m import f + def absolute_value(x: int) -> int: return x if x > 0 else -x @@ -1116,7 +1118,7 @@ def call_native_function(x: int) -> int: return absolute_value(x) def call_python_function(x: int) -> int: - return int(x) + return f(x) def return_float() -> float: return 5.0 @@ -1127,6 +1129,9 @@ def return_callable_type() -> Callable[[], float]: def call_callable_type() -> float: f = return_callable_type() return f() +[file m.py] +def f(x: int) -> int: + return x [out] def absolute_value(x): x :: int @@ -1158,14 +1163,18 @@ L0: return r0 def call_python_function(x): x :: int - r0, r1, r2 :: object - r3 :: int + r0 :: dict + r1 :: str + r2, r3, r4 :: object + r5 :: int L0: - r0 = load_address PyLong_Type - r1 = box(int, x) - r2 = PyObject_CallFunctionObjArgs(r0, r1, 0) - r3 = unbox(int, r2) - return r3 + r0 = __main__.globals :: static + r1 = 'f' + r2 = CPyDict_GetItem(r0, r1) + r3 = box(int, x) + r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) + r5 = unbox(int, r4) + return r5 def return_float(): r0 :: float L0: @@ -3068,8 +3077,7 @@ def call_sum(l, comparison): r1, r2 :: object r3, x :: int r4, r5 :: object - r6 :: bool - r7 :: object + r6, r7 :: bool r8, r9 :: int r10 :: bit L0: @@ -3084,8 +3092,8 @@ L2: r4 = box(int, x) r5 = PyObject_CallFunctionObjArgs(comparison, r4, 0) r6 = unbox(bool, r5) - r7 = box(bool, r6) - r8 = unbox(int, r7) + r7 = r6 << 1 + r8 = extend r7: builtins.bool to builtins.int r9 = CPyTagged_Add(r0, r8) r0 = r9 L3: diff --git a/mypyc/test-data/irbuild-bool.test b/mypyc/test-data/irbuild-bool.test new file mode 100644 index 000000000000..407ab8bcda93 --- /dev/null +++ b/mypyc/test-data/irbuild-bool.test @@ -0,0 +1,144 @@ +[case testBoolToAndFromInt] +from mypy_extensions import i64 + +def bool_to_int(b: bool) -> int: + return b +def int_to_bool(n: int) -> 
bool: + return bool(n) +def bool_to_i64(b: bool) -> i64: + return b +def i64_to_bool(n: i64) -> bool: + return bool(n) +def bit_to_int(n1: i64, n2: i64) -> int: + return bool(n1 == n2) +def bit_to_i64(n1: i64, n2: i64) -> i64: + return bool(n1 == n2) +[out] +def bool_to_int(b): + b, r0 :: bool + r1 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + return r1 +def int_to_bool(n): + n :: int + r0 :: bit +L0: + r0 = n != 0 + return r0 +def bool_to_i64(b): + b :: bool + r0 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + return r0 +def i64_to_bool(n): + n :: int64 + r0 :: bit +L0: + r0 = n != 0 + return r0 +def bit_to_int(n1, n2): + n1, n2 :: int64 + r0 :: bit + r1 :: bool + r2 :: int +L0: + r0 = n1 == n2 + r1 = r0 << 1 + r2 = extend r1: builtins.bool to builtins.int + return r2 +def bit_to_i64(n1, n2): + n1, n2 :: int64 + r0 :: bit + r1 :: int64 +L0: + r0 = n1 == n2 + r1 = extend r0: bit to int64 + return r1 + +[case testConversionToBool] +from typing import List, Optional + +class C: pass +class D: + def __bool__(self) -> bool: + return True + +def list_to_bool(l: List[str]) -> bool: + return bool(l) + +def always_truthy_instance_to_bool(o: C) -> bool: + return bool(o) + +def instance_to_bool(o: D) -> bool: + return bool(o) + +def optional_truthy_to_bool(o: Optional[C]) -> bool: + return bool(o) + +def optional_maybe_falsey_to_bool(o: Optional[D]) -> bool: + return bool(o) +[out] +def D.__bool__(self): + self :: __main__.D +L0: + return 1 +def list_to_bool(l): + l :: list + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3 :: bit +L0: + r0 = get_element_ptr l ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive l + r2 = r1 << 1 + r3 = r2 != 0 + return r3 +def always_truthy_instance_to_bool(o): + o :: __main__.C + r0 :: int32 + r1 :: bit + r2 :: bool +L0: + r0 = PyObject_IsTrue(o) + r1 = r0 >= 0 :: signed + r2 = truncate r0: int32 to builtins.bool + return r2 +def instance_to_bool(o): + o :: __main__.D + r0 :: bool +L0: + r0 = o.__bool__() + return r0 +def optional_truthy_to_bool(o): + o :: union[__main__.C, None] + r0 :: object + r1 :: bit +L0: + r0 = load_address _Py_NoneStruct + r1 = o != r0 + return r1 +def optional_maybe_falsey_to_bool(o): + o :: union[__main__.D, None] + r0 :: object + r1 :: bit + r2 :: __main__.D + r3 :: bool + r4 :: bit +L0: + r0 = load_address _Py_NoneStruct + r1 = o != r0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = cast(__main__.D, o) + r3 = r2.__bool__() + r4 = r3 + goto L3 +L2: + r4 = 0 +L3: + return r4 diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index 9c942ea75219..c6b62996bc80 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1559,6 +1559,32 @@ L2: L3: return r3 +[case testI64ExplicitConversionToInt_64bit] +from mypy_extensions import i64 + +def f(x: i64) -> int: + return int(x) +[out] +def f(x): + x :: int64 + r0, r1 :: bit + r2, r3, r4 :: int +L0: + r0 = x <= 4611686018427387903 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = x >= -4611686018427387904 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(x) + r3 = r2 + goto L4 +L3: + r4 = x << 1 + r3 = r4 +L4: + return r3 + [case testI64ExplicitConversionFromLiteral] from mypy_extensions import i64 diff --git a/mypyc/test-data/irbuild-int.test b/mypyc/test-data/irbuild-int.test index e193c16ef979..aebadce5650e 100644 --- a/mypyc/test-data/irbuild-int.test +++ b/mypyc/test-data/irbuild-int.test @@ -203,3 +203,22 @@ L0: def f5(): L0: return -2 + 
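# ---- Editor's aside (illustration only, not part of the patch) ----------------
# The IR expected in the new test case below follows from the ordinary Python
# semantics of one-argument int()/bool(), which the new specializations rely on.
# As plain, runnable Python:
assert int(True) == 1 and int(False) == 0      # bool -> int widens to 0/1
assert int(7) == 7                             # int() on an int is a no-op
assert bool(0) is False and bool(-3) is True   # int -> bool is a "!= 0" test
# --------------------------------------------------------------------------------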
+[case testConvertIntegralToInt] +def bool_to_int(b: bool) -> int: + return int(b) + +def int_to_int(n: int) -> int: + return int(n) +[out] +def bool_to_int(b): + b, r0 :: bool + r1 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + return r1 +def int_to_int(n): + n :: int +L0: + return n diff --git a/mypyc/test-data/run-bools.test b/mypyc/test-data/run-bools.test index a7afc5f2b1a2..e23b35d82fc5 100644 --- a/mypyc/test-data/run-bools.test +++ b/mypyc/test-data/run-bools.test @@ -15,6 +15,8 @@ True False [case testBoolOps] +from typing import Optional, Any + def f(x: bool) -> bool: if x: return False @@ -27,8 +29,8 @@ def test_if() -> None: def test_bitwise_and() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t & t == True assert t & f == False assert f & t == False @@ -40,8 +42,8 @@ def test_bitwise_and() -> None: def test_bitwise_or() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t | t == True assert t | f == True assert f | t == True @@ -53,8 +55,8 @@ def test_bitwise_or() -> None: def test_bitwise_xor() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t ^ t == False assert t ^ f == True assert f ^ t == True @@ -66,7 +68,6 @@ def test_bitwise_xor() -> None: f ^= f assert f == False -[case testIsinstanceBool] def test_isinstance_bool() -> None: a = True b = 1.0 @@ -76,3 +77,45 @@ def test_isinstance_bool() -> None: assert isinstance(b, bool) == False assert isinstance(c, bool) == False assert isinstance(d, bool) == True + +class C: pass +class D: + def __init__(self, b: bool) -> None: + self.b = b + + def __bool__(self) -> bool: + return self.b + +class E: pass +class F(E): + def __init__(self, b: bool) -> None: + self.b = b + + def __bool__(self) -> bool: + return self.b + +def optional_to_bool1(o: Optional[C]) -> bool: + return bool(o) + +def optional_to_bool2(o: Optional[D]) -> bool: + return bool(o) + +def optional_to_bool3(o: Optional[E]) -> bool: + return bool(o) + +def test_optional_to_bool() -> None: + assert not optional_to_bool1(None) + assert optional_to_bool1(C()) + assert not optional_to_bool2(None) + assert not optional_to_bool2(D(False)) + assert optional_to_bool2(D(True)) + assert not optional_to_bool3(None) + assert optional_to_bool3(E()) + assert not optional_to_bool3(F(False)) + assert optional_to_bool3(F(True)) + +def test_any_to_bool() -> None: + a: Any = int() + b: Any = a + 1 + assert not bool(a) + assert bool(b) diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index f0c664385d7b..c2a218156e66 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -62,6 +62,37 @@ def test_comparisons() -> None: assert one != two assert not (one != one2) +def is_true(x: i64) -> bool: + if x: + return True + else: + return False + +def is_true2(x: i64) -> bool: + return bool(x) + +def is_false(x: i64) -> bool: + if not x: + return True + else: + return False + +def test_i64_as_bool() -> None: + assert not is_true(0) + assert not is_true2(0) + assert is_false(0) + for x in 1, 55, -1, -7, 1 << 40, -(1 << 50): + assert is_true(x) + assert is_true2(x) + assert not is_false(x) + +def bool_as_i64(b: bool) -> i64: + return b + +def test_bool_as_i64() -> 
None: + assert bool_as_i64(False) == 0 + assert bool_as_i64(True) == 1 + def div_by_3(x: i64) -> i64: return x // 3 @@ -229,6 +260,16 @@ def test_coerce_to_and_from_int() -> None: m: int = x assert m == n +def test_coerce_to_and_from_int2() -> None: + for shift in range(0, 64): + for sign in 1, -1: + for delta in range(-5, 5): + n = sign * (1 << shift) + delta + if -(1 << 63) <= n < (1 << 63): + x: i64 = i64(n) + m: int = int(x) + assert m == n + def test_explicit_conversion_to_i64() -> None: x = i64(5) assert x == 5 diff --git a/mypyc/test-data/run-integers.test b/mypyc/test-data/run-integers.test index 74e7cd6b8fb7..c65f36110b46 100644 --- a/mypyc/test-data/run-integers.test +++ b/mypyc/test-data/run-integers.test @@ -353,6 +353,9 @@ def is_true(x: int) -> bool: else: return False +def is_true2(x: int) -> bool: + return bool(x) + def is_false(x: int) -> bool: if not x: return True @@ -361,11 +364,32 @@ def is_false(x: int) -> bool: def test_int_as_bool() -> None: assert not is_true(0) + assert not is_true2(0) assert is_false(0) for x in 1, 55, -1, -7, 1 << 50, 1 << 101, -(1 << 50), -(1 << 101): assert is_true(x) + assert is_true2(x) assert not is_false(x) +def bool_as_int(b: bool) -> int: + return b + +def bool_as_int2(b: bool) -> int: + return int(b) + +def test_bool_as_int() -> None: + assert bool_as_int(False) == 0 + assert bool_as_int(True) == 1 + assert bool_as_int2(False) == 0 + assert bool_as_int2(True) == 1 + +def no_op_conversion(n: int) -> int: + return int(n) + +def test_no_op_conversion() -> None: + for x in 1, 55, -1, -7, 1 << 50, 1 << 101, -(1 << 50), -(1 << 101): + assert no_op_conversion(x) == x + def test_divide() -> None: for x in range(-100, 100): for y in range(-100, 100): diff --git a/mypyc/test-data/run-strings.test b/mypyc/test-data/run-strings.test index c2b010bdb2bd..4a20c13ce789 100644 --- a/mypyc/test-data/run-strings.test +++ b/mypyc/test-data/run-strings.test @@ -136,6 +136,9 @@ def is_true(x: str) -> bool: else: return False +def is_true2(x: str) -> bool: + return bool(x) + def is_false(x: str) -> bool: if not x: return True @@ -145,8 +148,10 @@ def is_false(x: str) -> bool: def test_str_to_bool() -> None: assert is_false('') assert not is_true('') + assert not is_true2('') for x in 'a', 'foo', 'bar', 'some string': assert is_true(x) + assert is_true2(x) assert not is_false(x) def test_str_min_max() -> None: diff --git a/mypyc/test/test_irbuild.py b/mypyc/test/test_irbuild.py index 8928f94d6211..cb5e690eed55 100644 --- a/mypyc/test/test_irbuild.py +++ b/mypyc/test/test_irbuild.py @@ -24,6 +24,7 @@ files = [ "irbuild-basic.test", "irbuild-int.test", + "irbuild-bool.test", "irbuild-lists.test", "irbuild-tuple.test", "irbuild-dict.test", From 3ba22ee43ed8c07667521bf8e97910041d4b31c1 Mon Sep 17 00:00:00 2001 From: Richard Si Date: Fri, 13 Jan 2023 03:23:59 -0500 Subject: [PATCH 177/292] Use class-based NamedTuples in mypyc (#14442) --- mypyc/ir/class_ir.py | 9 +++++---- mypyc/ir/ops.py | 6 +++--- mypyc/primitives/int_ops.py | 14 +++++-------- mypyc/primitives/registry.py | 38 +++++++++++++++++------------------- 4 files changed, 31 insertions(+), 36 deletions(-) diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index 71d61c3f0efa..9b73eea3f8e6 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -69,10 +69,11 @@ # placed in the class's shadow vtable (if it has one). 
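# ---- Editor's aside (illustration only, not part of the patch) ----------------
# This commit rewrites "functional" NamedTuples, like the one removed just below,
# in the equivalent class-based form. A minimal standalone comparison (the names
# here are invented for illustration, not mypyc's):
from typing import NamedTuple, Optional

PairOld = NamedTuple("PairOld", [("name", str), ("value", Optional[int])])

class PairNew(NamedTuple):
    name: str
    value: Optional[int]

assert PairOld("x", 1) == PairNew("x", 1) == ("x", 1)
# --------------------------------------------------------------------------------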
-VTableMethod = NamedTuple( - "VTableMethod", - [("cls", "ClassIR"), ("name", str), ("method", FuncIR), ("shadow_method", Optional[FuncIR])], -) +class VTableMethod(NamedTuple): + cls: "ClassIR" + name: str + method: FuncIR + shadow_method: Optional[FuncIR] VTableEntries = List[VTableMethod] diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index cc6f542c3e23..51a0bffcf3f1 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -1450,6 +1450,6 @@ def visit_keep_alive(self, op: KeepAlive) -> T: # # (Serialization and deserialization *will* be used for incremental # compilation but so far it is not hooked up to anything.) -DeserMaps = NamedTuple( - "DeserMaps", [("classes", Dict[str, "ClassIR"]), ("functions", Dict[str, "FuncIR"])] -) +class DeserMaps(NamedTuple): + classes: Dict[str, "ClassIR"] + functions: Dict[str, "FuncIR"] diff --git a/mypyc/primitives/int_ops.py b/mypyc/primitives/int_ops.py index 55ef16ef5466..382bceb217f4 100644 --- a/mypyc/primitives/int_ops.py +++ b/mypyc/primitives/int_ops.py @@ -160,15 +160,11 @@ def int_unary_op(name: str, c_function_name: str) -> CFunctionDescription: # c_func_description: the C function to call when operands are tagged integers # c_func_negated: whether to negate the C function call's result # c_func_swap_operands: whether to swap lhs and rhs when call the function -IntComparisonOpDescription = NamedTuple( - "IntComparisonOpDescription", - [ - ("binary_op_variant", int), - ("c_func_description", CFunctionDescription), - ("c_func_negated", bool), - ("c_func_swap_operands", bool), - ], -) +class IntComparisonOpDescription(NamedTuple): + binary_op_variant: int + c_func_description: CFunctionDescription + c_func_negated: bool + c_func_swap_operands: bool # Equals operation on two boxed tagged integers diff --git a/mypyc/primitives/registry.py b/mypyc/primitives/registry.py index d7d171b72cca..1e2cf2695ee7 100644 --- a/mypyc/primitives/registry.py +++ b/mypyc/primitives/registry.py @@ -47,29 +47,27 @@ # is only used for primitives. We translate it away during IR building. 
ERR_NEG_INT: Final = 10 -CFunctionDescription = NamedTuple( - "CFunctionDescription", - [ - ("name", str), - ("arg_types", List[RType]), - ("return_type", RType), - ("var_arg_type", Optional[RType]), - ("truncated_type", Optional[RType]), - ("c_function_name", str), - ("error_kind", int), - ("steals", StealsDescription), - ("is_borrowed", bool), - ("ordering", Optional[List[int]]), - ("extra_int_constants", List[Tuple[int, RType]]), - ("priority", int), - ], -) + +class CFunctionDescription(NamedTuple): + name: str + arg_types: List[RType] + return_type: RType + var_arg_type: Optional[RType] + truncated_type: Optional[RType] + c_function_name: str + error_kind: int + steals: StealsDescription + is_borrowed: bool + ordering: Optional[List[int]] + extra_int_constants: List[Tuple[int, RType]] + priority: int # A description for C load operations including LoadGlobal and LoadAddress -LoadAddressDescription = NamedTuple( - "LoadAddressDescription", [("name", str), ("type", RType), ("src", str)] -) # name of the target to load +class LoadAddressDescription(NamedTuple): + name: str + type: RType + src: str # name of the target to load # CallC op for method call(such as 'str.join') From 87bb1dc912c9fcc449206728d6ca37afb7deb0bd Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Fri, 13 Jan 2023 19:45:40 +1000 Subject: [PATCH 178/292] =?UTF-8?q?(=F0=9F=94=BC)=20update=20dependencies?= =?UTF-8?q?=20(#14433)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit update black and isort, but not flake8, it requires python 3.8 Co-authored-by: KotlinIsland --- .pre-commit-config.yaml | 8 ++++---- test-requirements.txt | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af1bb320be6d..0de686b7eb01 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,10 @@ repos: - repo: https://github.com/psf/black - rev: 22.10.0 # must match test-requirements.txt + rev: 22.12.0 # must match test-requirements.txt hooks: - id: black - repo: https://github.com/pycqa/isort - rev: 5.10.1 # must match test-requirements.txt + rev: 5.11.4 # must match test-requirements.txt hooks: - id: isort - repo: https://github.com/pycqa/flake8 @@ -12,5 +12,5 @@ repos: hooks: - id: flake8 additional_dependencies: - - flake8-bugbear==22.9.23 # must match test-requirements.txt - - flake8-noqa==1.2.9 # must match test-requirements.txt + - flake8-bugbear==22.12.6 # must match test-requirements.txt + - flake8-noqa==1.3.0 # must match test-requirements.txt diff --git a/test-requirements.txt b/test-requirements.txt index 76255044e2dd..ac965f4abc52 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,12 +1,12 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 -black==22.10.0 # must match version in .pre-commit-config.yaml +black==22.12.0 # must match version in .pre-commit-config.yaml filelock>=3.3.0 flake8==5.0.4 # must match version in .pre-commit-config.yaml -flake8-bugbear==22.9.23 # must match version in .pre-commit-config.yaml -flake8-noqa==1.2.9 # must match version in .pre-commit-config.yaml -isort[colors]==5.10.1 # must match version in .pre-commit-config.yaml +flake8-bugbear==22.12.6 # must match version in .pre-commit-config.yaml +flake8-noqa==1.3.0 # must match version in .pre-commit-config.yaml +isort[colors]==5.11.4 # must match version in .pre-commit-config.yaml lxml>=4.9.1; (python_version<'3.11' or 
sys_platform!='win32') and python_version<'3.12' psutil>=4.0 # pytest 6.2.3 does not support Python 3.10 From 1c5eeb817998518f1789b06dcafd7c207f8553d6 Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Fri, 13 Jan 2023 19:47:02 +1000 Subject: [PATCH 179/292] =?UTF-8?q?(=F0=9F=A7=B9)=20cleanup=20config=20fil?= =?UTF-8?q?e=20(#14432)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit These two options in the config file are redundant. Co-authored-by: KotlinIsland --- mypy_self_check.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/mypy_self_check.ini b/mypy_self_check.ini index 719148240c89..d20fcd60a9cb 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -1,8 +1,6 @@ [mypy] strict = True -warn_no_return = True -strict_optional = True disallow_any_unimported = True show_traceback = True pretty = True From 28e5436bbba16c8a217dace56b951898e53532f9 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 13 Jan 2023 11:39:21 +0000 Subject: [PATCH 180/292] Refactoring: make the type of fullname str instead of Bogus[str] (#14435) The type Bogus[X] is treated as Any when the code is compiled with mypyc, while it's equivalent to X when only type checking. They are sometimes used when X is not actually the real type of a value, but changing it to the correct type would be complicated. Bogus[str] types are pretty awkward, since we are lying to the type checker and mypyc only sees Any types. An empty fullname is now represented by "" instead of None, so we no longer need a Bogus[str] type. This might break some plugins, so we should document this in release notes and the relevant issue that tracks plugin incompatibilities. (Various small optimizations, including this, together netted a 6% performance improvement in self check.) --- mypy/checker.py | 4 +- mypy/checkexpr.py | 14 +++---- mypy/nodes.py | 58 +++++++++++++++------------ mypy/partially_defined.py | 2 +- mypy/semanal.py | 12 +++--- mypy/server/aststrip.py | 2 +- mypy/server/deps.py | 14 +++---- mypy/strconv.py | 2 +- mypy/stubtest.py | 2 +- mypy/test/testsemanal.py | 2 +- mypy/tvar_scope.py | 2 +- mypy/typeanal.py | 2 +- mypyc/irbuild/builder.py | 6 +-- mypyc/irbuild/function.py | 2 +- test-data/unit/plugins/customentry.py | 2 +- 15 files changed, 65 insertions(+), 61 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 065758cd2be9..61104756b297 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2178,7 +2178,7 @@ def visit_class_def(self, defn: ClassDef) -> None: temp = self.temp_node(sig, context=decorator) fullname = None if isinstance(decorator, RefExpr): - fullname = decorator.fullname + fullname = decorator.fullname or None # TODO: Figure out how to have clearer error messages. # (e.g. "class decorator must be a function that accepts a type." 
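# ---- Editor's aside (illustration only, not part of the patch) ----------------
# With "" (rather than None) as the "no fullname" sentinel, call sites such as
# the ones changed in this commit can use plain truthiness checks instead of
# Optional narrowing. A tiny self-contained sketch (the helper is invented here):
def module_prefix_of(fullname: str) -> str:
    if not fullname:                    # catches "" where None used to appear
        return ""
    return fullname.rsplit(".", 1)[0] if "." in fullname else ""

assert module_prefix_of("") == ""
assert module_prefix_of("f") == ""
assert module_prefix_of("mypy.nodes.Var") == "mypy.nodes"
# --------------------------------------------------------------------------------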
@@ -4598,7 +4598,7 @@ def visit_decorator(self, e: Decorator) -> None: temp = self.temp_node(sig, context=e) fullname = None if isinstance(d, RefExpr): - fullname = d.fullname + fullname = d.fullname or None # if this is a expression like @b.a where b is an object, get the type of b # so we can pass it the method hook in the plugins object_type: Type | None = None diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index e6634e124d30..1c25b8ea7a12 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -216,8 +216,8 @@ def extract_refexpr_names(expr: RefExpr) -> set[str]: Note that currently, the only two subclasses of RefExpr are NameExpr and MemberExpr.""" output: set[str] = set() - while isinstance(expr.node, MypyFile) or expr.fullname is not None: - if isinstance(expr.node, MypyFile) and expr.fullname is not None: + while isinstance(expr.node, MypyFile) or expr.fullname: + if isinstance(expr.node, MypyFile) and expr.fullname: # If it's None, something's wrong (perhaps due to an # import cycle or a suppressed error). For now we just # skip it. @@ -228,7 +228,7 @@ def extract_refexpr_names(expr: RefExpr) -> set[str]: if isinstance(expr.node, TypeInfo): # Reference to a class or a nested class output.update(split_module_names(expr.node.module_name)) - elif expr.fullname is not None and "." in expr.fullname and not is_suppressed_import: + elif "." in expr.fullname and not is_suppressed_import: # Everything else (that is not a silenced import within a class) output.add(expr.fullname.rsplit(".", 1)[0]) break @@ -526,7 +526,7 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> # There are two special cases where plugins might act: # * A "static" reference/alias to a class or function; # get_function_hook() will be invoked for these. - fullname = e.callee.fullname + fullname = e.callee.fullname or None if isinstance(e.callee.node, TypeAlias): target = get_proper_type(e.callee.node.target) if isinstance(target, Instance): @@ -536,7 +536,7 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> # get_method_hook() and get_method_signature_hook() will # be invoked for these. if ( - fullname is None + not fullname and isinstance(e.callee, MemberExpr) and self.chk.has_type(e.callee.expr) ): @@ -605,7 +605,7 @@ def method_fullname(self, object_type: Type, method_name: str) -> str | None: elif isinstance(object_type, TupleType): type_name = tuple_fallback(object_type).type.fullname - if type_name is not None: + if type_name: return f"{type_name}.{method_name}" else: return None @@ -5489,7 +5489,7 @@ def type_info_from_type(typ: Type) -> TypeInfo | None: def is_operator_method(fullname: str | None) -> bool: - if fullname is None: + if not fullname: return False short_name = fullname.split(".")[-1] return ( diff --git a/mypy/nodes.py b/mypy/nodes.py index 80ab787f4a9c..85bb9ce4a8de 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -25,7 +25,6 @@ from mypy_extensions import trait import mypy.strconv -from mypy.bogus_type import Bogus from mypy.util import short_type from mypy.visitor import ExpressionVisitor, NodeVisitor, StatementVisitor @@ -247,12 +246,10 @@ class SymbolNode(Node): def name(self) -> str: pass - # fullname can often be None even though the type system - # disagrees. We mark this with Bogus to let mypyc know not to - # worry about it. 
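# ---- Editor's aside (illustration only, not part of the patch) ----------------
# The nodes.py hunks around this point give RefExpr and ClassDef the same shape:
# keep "_fullname" in __slots__ and expose it through a property plus setter,
# with "" standing in for "no fullname". Minimal standalone version (the class
# here is invented for illustration):
class _Node:
    __slots__ = ("_fullname",)

    def __init__(self) -> None:
        self._fullname = ""

    @property
    def fullname(self) -> str:
        return self._fullname

    @fullname.setter
    def fullname(self, v: str) -> None:
        self._fullname = v

n = _Node()
assert not n.fullname            # "" is falsey, so `if not node.fullname:` works
n.fullname = "mod.func"
assert n.fullname == "mod.func"
# --------------------------------------------------------------------------------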
+ # Fully qualified name @property @abstractmethod - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: pass @abstractmethod @@ -294,7 +291,7 @@ class MypyFile(SymbolNode): __match_args__ = ("name", "path", "defs") # Fully qualified module name - _fullname: Bogus[str] + _fullname: str # Path to the file (empty string if not known) path: str # Top-level definitions and statements @@ -361,7 +358,7 @@ def name(self) -> str: return "" if not self._fullname else self._fullname.split(".")[-1] @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: @@ -526,8 +523,7 @@ def __init__(self) -> None: self.is_static = False self.is_final = False # Name with module prefix - # TODO: Type should be Optional[str] - self._fullname = cast(Bogus[str], None) + self._fullname = "" @property @abstractmethod @@ -535,7 +531,7 @@ def name(self) -> str: pass @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname @@ -871,7 +867,7 @@ def name(self) -> str: return self.func.name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self.func.fullname @property @@ -967,7 +963,7 @@ def __init__(self, name: str, type: mypy.types.Type | None = None) -> None: super().__init__() self._name = name # Name without module prefix # TODO: Should be Optional[str] - self._fullname = cast("Bogus[str]", None) # Name with module prefix + self._fullname = "" # Name with module prefix # TODO: Should be Optional[TypeInfo] self.info = VAR_NO_INFO self.type: mypy.types.Type | None = type # Declared or inferred type, or None @@ -1019,7 +1015,7 @@ def name(self) -> str: return self._name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: @@ -1057,7 +1053,7 @@ class ClassDef(Statement): __slots__ = ( "name", - "fullname", + "_fullname", "defs", "type_vars", "base_type_exprs", @@ -1075,7 +1071,7 @@ class ClassDef(Statement): __match_args__ = ("name", "defs") name: str # Name of the class without module prefix - fullname: Bogus[str] # Fully qualified name of the class + _fullname: str # Fully qualified name of the class defs: Block type_vars: list[mypy.types.TypeVarLikeType] # Base class expressions (not semantically analyzed -- can be arbitrary expressions) @@ -1102,7 +1098,7 @@ def __init__( ) -> None: super().__init__() self.name = name - self.fullname = None # type: ignore[assignment] + self._fullname = "" self.defs = defs self.type_vars = type_vars or [] self.base_type_exprs = base_type_exprs or [] @@ -1117,6 +1113,14 @@ def __init__( self.deco_line: int | None = None self.removed_statements = [] + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v + def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_class_def(self) @@ -1725,7 +1729,7 @@ class RefExpr(Expression): __slots__ = ( "kind", "node", - "fullname", + "_fullname", "is_new_def", "is_inferred_def", "is_alias_rvalue", @@ -1739,7 +1743,7 @@ def __init__(self) -> None: # Var, FuncDef or TypeInfo that describes this self.node: SymbolNode | None = None # Fully qualified name (or name if not global) - self.fullname: str | None = None + self._fullname = "" # Does this define a new name? self.is_new_def = False # Does this define a new name with inferred type? 
@@ -1752,6 +1756,14 @@ def __init__(self) -> None: # Cache type guard from callable_type.type_guard self.type_guard: mypy.types.Type | None = None + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v + class NameExpr(RefExpr): """Name expression @@ -2806,7 +2818,7 @@ class is generic then it will be a type constructor of higher kind. "self_type", ) - _fullname: Bogus[str] # Fully qualified name + _fullname: str # Fully qualified name # Fully qualified name for the module this type was defined in. This # information is also in the fullname, but is harder to extract in the # case of nested class definitions. @@ -3023,7 +3035,7 @@ def name(self) -> str: return self.defn.name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def is_generic(self) -> bool: @@ -3739,11 +3751,7 @@ def serialize(self, prefix: str, name: str) -> JsonDict: if prefix is not None: fullname = self.node.fullname if ( - # See the comment above SymbolNode.fullname -- fullname can often be None, - # but for complex reasons it's annotated as being `Bogus[str]` instead of `str | None`, - # meaning mypy erroneously thinks the `fullname is not None` check here is redundant - fullname is not None # type: ignore[redundant-expr] - and "." in fullname + "." in fullname and fullname != prefix + "." + name and not (isinstance(self.node, Var) and self.node.from_module_getattr) ): diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index c63c62c3e393..9a58df04371f 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -287,7 +287,7 @@ def is_undefined(self, name: str) -> bool: def refers_to_builtin(o: RefExpr) -> bool: - return o.fullname is not None and o.fullname.startswith("builtins.") + return o.fullname.startswith("builtins.") class Loop: diff --git a/mypy/semanal.py b/mypy/semanal.py index aee355d7880d..acc485a609e0 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3022,13 +3022,13 @@ def analyze_lvalues(self, s: AssignmentStmt) -> None: def apply_dynamic_class_hook(self, s: AssignmentStmt) -> None: if not isinstance(s.rvalue, CallExpr): return - fname = None + fname = "" call = s.rvalue while True: if isinstance(call.callee, RefExpr): fname = call.callee.fullname # check if method call - if fname is None and isinstance(call.callee, MemberExpr): + if not fname and isinstance(call.callee, MemberExpr): callee_expr = call.callee.expr if isinstance(callee_expr, RefExpr) and callee_expr.fullname: method_name = call.callee.name @@ -4624,7 +4624,7 @@ def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None: else: expr.kind = sym.kind expr.node = sym.node - expr.fullname = sym.fullname + expr.fullname = sym.fullname or "" def visit_super_expr(self, expr: SuperExpr) -> None: if not self.type and not expr.call.args: @@ -4849,7 +4849,7 @@ def visit_member_expr(self, expr: MemberExpr) -> None: self.process_placeholder(expr.name, "attribute", expr) return expr.kind = sym.kind - expr.fullname = sym.fullname + expr.fullname = sym.fullname or "" expr.node = sym.node elif isinstance(base, RefExpr): # This branch handles the case C.bar (or cls.bar or self.bar inside @@ -4881,7 +4881,7 @@ def visit_member_expr(self, expr: MemberExpr) -> None: if not n: return expr.kind = n.kind - expr.fullname = n.fullname + expr.fullname = n.fullname or "" expr.node = n.node def visit_op_expr(self, expr: OpExpr) -> None: @@ -5341,7 +5341,7 @@ def is_overloaded_item(self, 
node: SymbolNode, statement: Statement) -> bool: return False def is_defined_in_current_module(self, fullname: str | None) -> bool: - if fullname is None: + if not fullname: return False return module_prefix(self.modules, fullname) == self.cur_mod_id diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index b0666f8e1ff4..05af6a3d53a1 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -230,7 +230,7 @@ def visit_op_expr(self, node: OpExpr) -> None: def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None - node.fullname = None + node.fullname = "" node.is_new_def = False node.is_inferred_def = False diff --git a/mypy/server/deps.py b/mypy/server/deps.py index eb40737061bf..50b66b70b8aa 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -289,13 +289,9 @@ def visit_decorator(self, o: Decorator) -> None: # all call sites, making them all `Any`. for d in o.decorators: tname: str | None = None - if isinstance(d, RefExpr) and d.fullname is not None: + if isinstance(d, RefExpr) and d.fullname: tname = d.fullname - if ( - isinstance(d, CallExpr) - and isinstance(d.callee, RefExpr) - and d.callee.fullname is not None - ): + if isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and d.callee.fullname: tname = d.callee.fullname if tname is not None: self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname)) @@ -500,7 +496,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: if ( isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr) - and rvalue.callee.fullname is not None + and rvalue.callee.fullname ): fname: str | None = None if isinstance(rvalue.callee.node, TypeInfo): @@ -510,7 +506,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: fname = init.node.fullname else: fname = rvalue.callee.fullname - if fname is None: + if not fname: return for lv in o.lvalues: if isinstance(lv, RefExpr) and lv.fullname and lv.is_new_def: @@ -638,7 +634,7 @@ def visit_del_stmt(self, o: DelStmt) -> None: # Expressions def process_global_ref_expr(self, o: RefExpr) -> None: - if o.fullname is not None: + if o.fullname: self.add_dependency(make_trigger(o.fullname)) # If this is a reference to a type, generate a dependency to its diff --git a/mypy/strconv.py b/mypy/strconv.py index 861a7c9b7fa0..b2e9da5dbf6a 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -367,7 +367,7 @@ def pretty_name( id = "" if isinstance(target_node, mypy.nodes.MypyFile) and name == fullname: n += id - elif kind == mypy.nodes.GDEF or (fullname != name and fullname is not None): + elif kind == mypy.nodes.GDEF or (fullname != name and fullname): # Append fully qualified name for global references. 
n += f" [{fullname}{id}]" elif kind == mypy.nodes.LDEF: diff --git a/mypy/stubtest.py b/mypy/stubtest.py index bfd8e2b9c81a..774f03cbbdd0 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1147,7 +1147,7 @@ def apply_decorator_to_funcitem( ) -> nodes.FuncItem | None: if not isinstance(decorator, nodes.RefExpr): return None - if decorator.fullname is None: + if not decorator.fullname: # Happens with namedtuple return None if ( diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py index 6cfd53f09beb..71ebc43df8c2 100644 --- a/mypy/test/testsemanal.py +++ b/mypy/test/testsemanal.py @@ -202,7 +202,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: for f in result.files.values(): for n in f.names.values(): if isinstance(n.node, TypeInfo): - assert n.fullname is not None + assert n.fullname typeinfos[n.fullname] = n.node # The output is the symbol table converted into a string. diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py index db83768bf68a..9b432d8e68ec 100644 --- a/mypy/tvar_scope.py +++ b/mypy/tvar_scope.py @@ -129,7 +129,7 @@ def bind_existing(self, tvar_def: TypeVarLikeType) -> None: def get_binding(self, item: str | SymbolTableNode) -> TypeVarLikeType | None: fullname = item.fullname if isinstance(item, SymbolTableNode) else item - assert fullname is not None + assert fullname if fullname in self.scope: return self.scope[fullname] elif self.parent is not None: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index df74344fb392..07720afeff88 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -1123,7 +1123,7 @@ def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(self.anal_type(t.item), line=t.line) def visit_placeholder_type(self, t: PlaceholderType) -> Type: - n = None if t.fullname is None else self.api.lookup_fully_qualified(t.fullname) + n = None if not t.fullname else self.api.lookup_fully_qualified(t.fullname) if not n or isinstance(n.node, PlaceholderNode): self.api.defer() # Still incomplete return t diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index c24207ac64ec..f2a70d4e8691 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -1001,7 +1001,7 @@ def call_refexpr_with_args( ) -> Value: # Handle data-driven special-cased primitive call ops. 
- if callee.fullname is not None and expr.arg_kinds == [ARG_POS] * len(arg_values): + if callee.fullname and expr.arg_kinds == [ARG_POS] * len(arg_values): call_c_ops_candidates = function_ops.get(callee.fullname, []) target = self.builder.matching_call_c( call_c_ops_candidates, arg_values, expr.line, self.node_type(expr) @@ -1026,7 +1026,7 @@ def call_refexpr_with_args( callee_node = callee_node.func if ( callee_node is not None - and callee.fullname is not None + and callee.fullname and callee_node in self.mapper.func_to_decl and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds) ): @@ -1240,7 +1240,7 @@ def load_global(self, expr: NameExpr) -> Value: and isinstance(expr.node, TypeInfo) and not self.is_synthetic_type(expr.node) ): - assert expr.fullname is not None + assert expr.fullname return self.load_native_type_object(expr.fullname) return self.load_global_str(expr.name, expr.line) diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index 5447f945db25..523f8c299c2f 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -789,7 +789,7 @@ def load_type(builder: IRBuilder, typ: TypeInfo, line: int) -> Value: def load_func(builder: IRBuilder, func_name: str, fullname: str | None, line: int) -> Value: - if fullname is not None and not fullname.startswith(builder.current_module): + if fullname and not fullname.startswith(builder.current_module): # we're calling a function in a different module # We can't use load_module_attr_by_fullname here because we need to load the function using diff --git a/test-data/unit/plugins/customentry.py b/test-data/unit/plugins/customentry.py index f8b86c33dcfc..b3dacfd4cf44 100644 --- a/test-data/unit/plugins/customentry.py +++ b/test-data/unit/plugins/customentry.py @@ -4,7 +4,7 @@ class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == '__main__.f': return my_hook - assert fullname is not None + assert fullname return None def my_hook(ctx): From f37a0aeeb89b87de2d2bcdb7a749e4e6f63a3185 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 14 Jan 2023 18:54:57 -0800 Subject: [PATCH 181/292] Sync typeshed (#14449) Source commit: https://github.com/python/typeshed/commit/ea0ae2155e8a04c9837903c3aff8dd5ad5f36ebc Note that you will need to close and re-open the PR in order to trigger CI. 
Co-authored-by: mypybot <> Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 4 + mypy/typeshed/stdlib/builtins.pyi | 17 +- mypy/typeshed/stdlib/compileall.pyi | 40 ++- mypy/typeshed/stdlib/contextlib.pyi | 4 +- mypy/typeshed/stdlib/distutils/dist.pyi | 4 +- mypy/typeshed/stdlib/email/message.pyi | 4 +- mypy/typeshed/stdlib/genericpath.pyi | 18 +- mypy/typeshed/stdlib/http/server.pyi | 29 +-- mypy/typeshed/stdlib/io.pyi | 6 +- mypy/typeshed/stdlib/itertools.pyi | 6 + .../multiprocessing/resource_tracker.pyi | 4 +- mypy/typeshed/stdlib/os/__init__.pyi | 39 +-- mypy/typeshed/stdlib/posixpath.pyi | 8 +- mypy/typeshed/stdlib/shutil.pyi | 4 +- mypy/typeshed/stdlib/socketserver.pyi | 59 +++-- mypy/typeshed/stdlib/ssl.pyi | 40 ++- mypy/typeshed/stdlib/subprocess.pyi | 245 +++++++++--------- mypy/typeshed/stdlib/sysconfig.pyi | 8 +- mypy/typeshed/stdlib/tokenize.pyi | 4 +- mypy/typeshed/stdlib/urllib/parse.pyi | 6 +- mypy/typeshed/stdlib/venv/__init__.pyi | 3 + .../stdlib/xml/etree/ElementInclude.pyi | 4 +- .../typeshed/stdlib/xml/etree/ElementTree.pyi | 6 +- 23 files changed, 315 insertions(+), 247 deletions(-) diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index f01db74caf40..68ac2a9b1900 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -36,6 +36,9 @@ AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001 # "Incomplete | None" instead of "Any | None". Incomplete: TypeAlias = Any +# To describe a function parameter that is unused and will work with anything. +Unused: TypeAlias = object + # stable class IdentityFunction(Protocol): def __call__(self, __x: _T) -> _T: ... @@ -205,6 +208,7 @@ class HasFileno(Protocol): FileDescriptor: TypeAlias = int # stable FileDescriptorLike: TypeAlias = int | HasFileno # stable +FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath # stable class SupportsRead(Protocol[_T_co]): diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 8fbef893ac57..b2241bb60527 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -4,6 +4,7 @@ import types from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( AnyStr_co, + FileDescriptorOrPath, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, @@ -11,7 +12,6 @@ from _typeshed import ( OpenTextMode, ReadableBuffer, Self, - StrOrBytesPath, SupportsAdd, SupportsAiter, SupportsAnext, @@ -1320,13 +1320,12 @@ def next(__i: SupportsNext[_T]) -> _T: ... def next(__i: SupportsNext[_T], __default: _VT) -> _T | _VT: ... def oct(__number: int | SupportsIndex) -> str: ... 
-_OpenFile = StrOrBytesPath | int # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed _Opener: TypeAlias = Callable[[str, int], int] # Text mode: always returns a TextIOWrapper @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenTextMode = ..., buffering: int = ..., encoding: str | None = ..., @@ -1339,7 +1338,7 @@ def open( # Unbuffered binary mode: returns a FileIO @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = ..., @@ -1352,7 +1351,7 @@ def open( # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeUpdating, buffering: Literal[-1, 1] = ..., encoding: None = ..., @@ -1363,7 +1362,7 @@ def open( ) -> BufferedRandom: ... @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeWriting, buffering: Literal[-1, 1] = ..., encoding: None = ..., @@ -1374,7 +1373,7 @@ def open( ) -> BufferedWriter: ... @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeReading, buffering: Literal[-1, 1] = ..., encoding: None = ..., @@ -1387,7 +1386,7 @@ def open( # Buffering cannot be determined: fall back to BinaryIO @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryMode, buffering: int = ..., encoding: None = ..., @@ -1400,7 +1399,7 @@ def open( # Fallback if mode is not specified @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: str, buffering: int = ..., encoding: str | None = ..., diff --git a/mypy/typeshed/stdlib/compileall.pyi b/mypy/typeshed/stdlib/compileall.pyi index dd1de3f496e7..4621500eda96 100644 --- a/mypy/typeshed/stdlib/compileall.pyi +++ b/mypy/typeshed/stdlib/compileall.pyi @@ -8,7 +8,7 @@ __all__ = ["compile_dir", "compile_file", "compile_path"] class _SupportsSearch(Protocol): def search(self, string: str) -> Any: ... -if sys.version_info >= (3, 9): +if sys.version_info >= (3, 10): def compile_dir( dir: StrPath, maxlevels: int | None = ..., @@ -21,7 +21,7 @@ if sys.version_info >= (3, 9): workers: int = ..., invalidation_mode: PycInvalidationMode | None = ..., *, - stripdir: str | None = ..., # TODO: change to StrPath | None once https://bugs.python.org/issue40447 is resolved + stripdir: StrPath | None = ..., prependdir: StrPath | None = ..., limit_sl_dest: StrPath | None = ..., hardlink_dupes: bool = ..., @@ -36,7 +36,41 @@ if sys.version_info >= (3, 9): optimize: int = ..., invalidation_mode: PycInvalidationMode | None = ..., *, - stripdir: str | None = ..., # TODO: change to StrPath | None once https://bugs.python.org/issue40447 is resolved + stripdir: StrPath | None = ..., + prependdir: StrPath | None = ..., + limit_sl_dest: StrPath | None = ..., + hardlink_dupes: bool = ..., + ) -> int: ... + +elif sys.version_info >= (3, 9): + def compile_dir( + dir: StrPath, + maxlevels: int | None = ..., + ddir: StrPath | None = ..., + force: bool = ..., + rx: _SupportsSearch | None = ..., + quiet: int = ..., + legacy: bool = ..., + optimize: int = ..., + workers: int = ..., + invalidation_mode: PycInvalidationMode | None = ..., + *, + stripdir: str | None = ..., # https://bugs.python.org/issue40447 + prependdir: StrPath | None = ..., + limit_sl_dest: StrPath | None = ..., + hardlink_dupes: bool = ..., + ) -> int: ... 
+ def compile_file( + fullname: StrPath, + ddir: StrPath | None = ..., + force: bool = ..., + rx: _SupportsSearch | None = ..., + quiet: int = ..., + legacy: bool = ..., + optimize: int = ..., + invalidation_mode: PycInvalidationMode | None = ..., + *, + stripdir: str | None = ..., # https://bugs.python.org/issue40447 prependdir: StrPath | None = ..., limit_sl_dest: StrPath | None = ..., hardlink_dupes: bool = ..., diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi index ca8830439538..1a6642b643e3 100644 --- a/mypy/typeshed/stdlib/contextlib.pyi +++ b/mypy/typeshed/stdlib/contextlib.pyi @@ -1,6 +1,6 @@ import abc import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import FileDescriptorOrPath, Self from abc import abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType @@ -193,7 +193,7 @@ else: def __exit__(self, *exctype: object) -> None: ... if sys.version_info >= (3, 11): - _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=int | StrOrBytesPath) + _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) class chdir(AbstractContextManager[None], Generic[_T_fd_or_any_path]): path: _T_fd_or_any_path diff --git a/mypy/typeshed/stdlib/distutils/dist.pyi b/mypy/typeshed/stdlib/distutils/dist.pyi index ef47e4e4d15a..fc1bce261e57 100644 --- a/mypy/typeshed/stdlib/distutils/dist.pyi +++ b/mypy/typeshed/stdlib/distutils/dist.pyi @@ -1,10 +1,10 @@ -from _typeshed import StrOrBytesPath, SupportsWrite +from _typeshed import FileDescriptorOrPath, SupportsWrite from collections.abc import Iterable, Mapping from distutils.cmd import Command from typing import IO, Any class DistributionMetadata: - def __init__(self, path: int | StrOrBytesPath | None = ...) -> None: ... + def __init__(self, path: FileDescriptorOrPath | None = ...) -> None: ... name: str | None version: str | None author: str | None diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index c6b77cdde054..58b1c1cd8f3d 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -76,8 +76,8 @@ class Message: ) -> None: ... def __init__(self, policy: Policy = ...) -> None: ... # The following two methods are undocumented, but a source code comment states that they are public API - def set_raw(self, name: str, value: str) -> None: ... - def raw_items(self) -> Iterator[tuple[str, str]]: ... + def set_raw(self, name: str, value: _HeaderType) -> None: ... + def raw_items(self) -> Iterator[tuple[str, _HeaderType]]: ... class MIMEPart(Message): def __init__(self, policy: Policy | None = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/genericpath.pyi b/mypy/typeshed/stdlib/genericpath.pyi index 911d582fd538..46426b63c852 100644 --- a/mypy/typeshed/stdlib/genericpath.pyi +++ b/mypy/typeshed/stdlib/genericpath.pyi @@ -1,5 +1,5 @@ import os -from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRichComparisonT +from _typeshed import BytesPath, FileDescriptorOrPath, StrPath, SupportsRichComparisonT from collections.abc import Sequence from typing import overload from typing_extensions import Literal, LiteralString @@ -31,16 +31,16 @@ def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... 
@overload def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... -def exists(path: StrOrBytesPath | int) -> bool: ... -def getsize(filename: StrOrBytesPath | int) -> int: ... -def isfile(path: StrOrBytesPath | int) -> bool: ... -def isdir(s: StrOrBytesPath | int) -> bool: ... +def exists(path: FileDescriptorOrPath) -> bool: ... +def getsize(filename: FileDescriptorOrPath) -> int: ... +def isfile(path: FileDescriptorOrPath) -> bool: ... +def isdir(s: FileDescriptorOrPath) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. -def getatime(filename: StrOrBytesPath | int) -> float: ... -def getmtime(filename: StrOrBytesPath | int) -> float: ... -def getctime(filename: StrOrBytesPath | int) -> float: ... -def samefile(f1: StrOrBytesPath | int, f2: StrOrBytesPath | int) -> bool: ... +def getatime(filename: FileDescriptorOrPath) -> float: ... +def getmtime(filename: FileDescriptorOrPath) -> float: ... +def getctime(filename: FileDescriptorOrPath) -> float: ... +def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... diff --git a/mypy/typeshed/stdlib/http/server.pyi b/mypy/typeshed/stdlib/http/server.pyi index 011d464b4653..04ac28c3278e 100644 --- a/mypy/typeshed/stdlib/http/server.pyi +++ b/mypy/typeshed/stdlib/http/server.pyi @@ -1,3 +1,4 @@ +import _socket import email.message import io import socketserver @@ -52,25 +53,15 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): extensions_map: dict[str, str] if sys.version_info >= (3, 12): - def __init__( - self, - request: socketserver._RequestType, - client_address: socketserver._AddressType, - server: socketserver.BaseServer, - *, - directory: str | None = ..., - index_pages: Sequence[str] | None = ..., - ) -> None: ... - else: - def __init__( - self, - request: socketserver._RequestType, - client_address: socketserver._AddressType, - server: socketserver.BaseServer, - *, - directory: str | None = ..., - ) -> None: ... - + index_pages: ClassVar[tuple[str, ...]] + def __init__( + self, + request: socketserver._RequestType, + client_address: _socket._RetAddress, + server: socketserver.BaseServer, + *, + directory: str | None = ..., + ) -> None: ... def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index 9c4c769fe34b..c1889300f981 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -2,7 +2,7 @@ import abc import builtins import codecs import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from _typeshed import FileDescriptorOrPath, ReadableBuffer, Self, WriteableBuffer from collections.abc import Callable, Iterable, Iterator from os import _Opener from types import TracebackType @@ -92,9 +92,9 @@ class BufferedIOBase(IOBase): class FileIO(RawIOBase, BinaryIO): mode: str - name: StrOrBytesPath | int # type: ignore[assignment] + name: FileDescriptorOrPath # type: ignore[assignment] def __init__( - self, file: StrOrBytesPath | int, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... + self, file: FileDescriptorOrPath, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... ) -> None: ... 
@property def closefd(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index 7299ee8200db..3cc1bd00de79 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -271,3 +271,9 @@ if sys.version_info >= (3, 10): def __new__(cls, __iterable: Iterable[_T]) -> pairwise[tuple[_T, _T]]: ... def __iter__(self: Self) -> Self: ... def __next__(self) -> _T_co: ... + +if sys.version_info >= (3, 12): + class batched(Iterator[_T_co], Generic[_T_co]): + def __new__(cls: type[Self], iterable: Iterable[_T_co], n: int) -> Self: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> tuple[_T_co, ...]: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi index 50f3db67467b..e2b940796126 100644 --- a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, StrOrBytesPath +from _typeshed import FileDescriptorOrPath, Incomplete from collections.abc import Sized __all__ = ["ensure_running", "register", "unregister"] @@ -15,4 +15,4 @@ register = _resource_tracker.register unregister = _resource_tracker.unregister getfd = _resource_tracker.getfd -def main(fd: StrOrBytesPath | int) -> None: ... +def main(fd: FileDescriptorOrPath) -> None: ... diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index 590d20576665..ec31cc5e2a76 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -3,6 +3,7 @@ from _typeshed import ( AnyStr_co, BytesPath, FileDescriptorLike, + FileDescriptorOrPath, GenericPath, OpenBinaryMode, OpenBinaryModeReading, @@ -370,9 +371,6 @@ def listdir(path: StrPath | None = ...) -> list[str]: ... def listdir(path: BytesPath) -> list[bytes]: ... @overload def listdir(path: int) -> list[str]: ... - -_FdOrAnyPath: TypeAlias = int | StrOrBytesPath - @final class DirEntry(Generic[AnyStr]): # This is what the scandir iterator yields @@ -676,16 +674,16 @@ if sys.platform != "win32": def write(__fd: int, __data: ReadableBuffer) -> int: ... def access( - path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., effective_ids: bool = ..., follow_symlinks: bool = ... + path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = ..., effective_ids: bool = ..., follow_symlinks: bool = ... ) -> bool: ... -def chdir(path: _FdOrAnyPath) -> None: ... +def chdir(path: FileDescriptorOrPath) -> None: ... if sys.platform != "win32": def fchdir(fd: FileDescriptorLike) -> None: ... def getcwd() -> str: ... def getcwdb() -> bytes: ... -def chmod(path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... +def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... if sys.platform != "win32" and sys.platform != "linux": def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix @@ -694,7 +692,9 @@ if sys.platform != "win32" and sys.platform != "linux": if sys.platform != "win32": def chroot(path: StrOrBytesPath) -> None: ... - def chown(path: _FdOrAnyPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... + def chown( + path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ... + ) -> None: ... 
def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... def link( @@ -718,7 +718,7 @@ if sys.platform != "win32": def major(__device: int) -> int: ... def minor(__device: int) -> int: ... def makedev(__major: int, __minor: int) -> int: ... - def pathconf(path: _FdOrAnyPath, name: str | int) -> int: ... # Unix only + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = ...) -> AnyStr: ... def remove(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... @@ -739,20 +739,20 @@ def scandir(path: None = ...) -> _ScandirIterator[str]: ... def scandir(path: int) -> _ScandirIterator[str]: ... @overload def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... -def stat(path: _FdOrAnyPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> stat_result: ... +def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> stat_result: ... if sys.platform != "win32": - def statvfs(path: _FdOrAnyPath) -> statvfs_result: ... # Unix only + def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only def symlink(src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = ..., *, dir_fd: int | None = ...) -> None: ... if sys.platform != "win32": def sync() -> None: ... # Unix only -def truncate(path: _FdOrAnyPath, length: int) -> None: ... # Unix only up to version 3.4 +def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 def unlink(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... def utime( - path: _FdOrAnyPath, + path: FileDescriptorOrPath, times: tuple[int, int] | tuple[float, float] | None = ..., *, ns: tuple[int, int] = ..., @@ -786,11 +786,16 @@ if sys.platform != "win32": dir_fd: int | None = ..., ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... if sys.platform == "linux": - def getxattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... - def listxattr(path: _FdOrAnyPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... - def removexattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... + def listxattr(path: FileDescriptorOrPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... def setxattr( - path: _FdOrAnyPath, attribute: StrOrBytesPath, value: ReadableBuffer, flags: int = ..., *, follow_symlinks: bool = ... + path: FileDescriptorOrPath, + attribute: StrOrBytesPath, + value: ReadableBuffer, + flags: int = ..., + *, + follow_symlinks: bool = ..., ) -> None: ... def abort() -> NoReturn: ... @@ -825,7 +830,7 @@ _ExecVArgs: TypeAlias = ( _ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] def execv(__path: StrOrBytesPath, __argv: _ExecVArgs) -> NoReturn: ... -def execve(path: _FdOrAnyPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def _exit(status: int) -> NoReturn: ... 
diff --git a/mypy/typeshed/stdlib/posixpath.pyi b/mypy/typeshed/stdlib/posixpath.pyi index 8d880a072dfb..ff9c2482ace5 100644 --- a/mypy/typeshed/stdlib/posixpath.pyi +++ b/mypy/typeshed/stdlib/posixpath.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import AnyOrLiteralStr, BytesPath, StrOrBytesPath, StrPath +from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath from collections.abc import Sequence from genericpath import ( commonprefix as commonprefix, @@ -147,6 +147,6 @@ def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... def isabs(s: StrOrBytesPath) -> bool: ... -def islink(path: StrOrBytesPath | int) -> bool: ... -def ismount(path: StrOrBytesPath | int) -> bool: ... -def lexists(path: StrOrBytesPath | int) -> bool: ... +def islink(path: FileDescriptorOrPath) -> bool: ... +def ismount(path: FileDescriptorOrPath) -> bool: ... +def lexists(path: FileDescriptorOrPath) -> bool: ... diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index 568879d76003..6dbfbcc06998 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -1,6 +1,6 @@ import os import sys -from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Sequence from typing import Any, AnyStr, NamedTuple, Protocol, TypeVar, overload from typing_extensions import TypeAlias @@ -118,7 +118,7 @@ class _ntuple_diskusage(NamedTuple): used: int free: int -def disk_usage(path: int | StrOrBytesPath) -> _ntuple_diskusage: ... +def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... # While chown can be imported on Windows, it doesn't actually work; # see https://bugs.python.org/issue33140. We keep it here because it's diff --git a/mypy/typeshed/stdlib/socketserver.pyi b/mypy/typeshed/stdlib/socketserver.pyi index e597818ef7da..b5147d356ffe 100644 --- a/mypy/typeshed/stdlib/socketserver.pyi +++ b/mypy/typeshed/stdlib/socketserver.pyi @@ -1,6 +1,7 @@ import sys import types -from _typeshed import Self +from _socket import _Address, _RetAddress +from _typeshed import ReadableBuffer, Self from collections.abc import Callable from socket import socket as _socket from typing import Any, BinaryIO, ClassVar, Union @@ -29,38 +30,39 @@ if sys.platform != "win32": ] _RequestType: TypeAlias = Union[_socket, tuple[bytes, _socket]] -_AddressType: TypeAlias = Union[tuple[str, int], str] +_AfUnixAddress: TypeAlias = str | ReadableBuffer # adddress acceptable for an AF_UNIX socket +_AfInetAddress: TypeAlias = tuple[str | bytes | bytearray, int] # address acceptable for an AF_INET socket # This can possibly be generic at some point: class BaseServer: address_family: int - server_address: tuple[str, int] + server_address: _Address socket: _socket allow_reuse_address: bool request_queue_size: int socket_type: int timeout: float | None def __init__( - self: Self, server_address: Any, RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler] + self: Self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] ) -> None: ... # It is not actually a `@property`, but we need a `Self` type: @property - def RequestHandlerClass(self: Self) -> Callable[[Any, Any, Self], BaseRequestHandler]: ... 
+ def RequestHandlerClass(self: Self) -> Callable[[Any, _RetAddress, Self], BaseRequestHandler]: ... @RequestHandlerClass.setter - def RequestHandlerClass(self: Self, val: Callable[[Any, Any, Self], BaseRequestHandler]) -> None: ... + def RequestHandlerClass(self: Self, val: Callable[[Any, _RetAddress, Self], BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = ...) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... - def finish_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def get_request(self) -> tuple[Any, Any]: ... - def handle_error(self, request: _RequestType, client_address: _AddressType) -> None: ... + def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... def handle_timeout(self) -> None: ... - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_activate(self) -> None: ... def server_bind(self) -> None: ... - def verify_request(self, request: _RequestType, client_address: _AddressType) -> bool: ... + def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... def __enter__(self: Self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None @@ -72,32 +74,35 @@ class BaseServer: class TCPServer(BaseServer): if sys.version_info >= (3, 11): allow_reuse_port: bool + server_address: _AfInetAddress # type: ignore[assignment] def __init__( self: Self, - server_address: tuple[str, int], - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], + server_address: _AfInetAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... - def get_request(self) -> tuple[_socket, Any]: ... + def get_request(self) -> tuple[_socket, _RetAddress]: ... class UDPServer(TCPServer): max_packet_size: ClassVar[int] - def get_request(self) -> tuple[tuple[bytes, _socket], Any]: ... # type: ignore[override] + def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... # type: ignore[override] if sys.platform != "win32": class UnixStreamServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] def __init__( self: Self, - server_address: str | bytes, - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... class UnixDatagramServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] def __init__( self: Self, - server_address: str | bytes, - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... @@ -110,14 +115,14 @@ if sys.platform != "win32": def collect_children(self, *, blocking: bool = ...) -> None: ... # undocumented def handle_timeout(self) -> None: ... # undocumented def service_actions(self) -> None: ... 
# undocumented - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... class ThreadingMixIn: daemon_threads: bool block_on_close: bool - def process_request_thread(self, request: _RequestType, client_address: _AddressType) -> None: ... # undocumented - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... if sys.platform != "win32": @@ -132,16 +137,16 @@ if sys.platform != "win32": class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: - # Those are technically of types, respectively: - # * _RequestType - # * _AddressType - # But there are some concerns that having unions here would cause + # `request` is technically of type _RequestType, + # but there are some concerns that having a union here would cause # too much inconvenience to people using it (see # https://github.com/python/typeshed/pull/384#issuecomment-234649696) + # + # Note also that _RetAddress is also just an alias for `Any` request: Any - client_address: Any + client_address: _RetAddress server: BaseServer - def __init__(self, request: _RequestType, client_address: _AddressType, server: BaseServer) -> None: ... + def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None: ... def setup(self) -> None: ... def handle(self) -> None: ... def finish(self) -> None: ... diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 91844e8369df..6d7df5e1c202 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -63,18 +63,34 @@ def create_default_context( capath: StrOrBytesPath | None = ..., cadata: str | ReadableBuffer | None = ..., ) -> SSLContext: ... -def _create_unverified_context( - protocol: int = ..., - *, - cert_reqs: int = ..., - check_hostname: bool = ..., - purpose: Purpose = ..., - certfile: StrOrBytesPath | None = ..., - keyfile: StrOrBytesPath | None = ..., - cafile: StrOrBytesPath | None = ..., - capath: StrOrBytesPath | None = ..., - cadata: str | ReadableBuffer | None = ..., -) -> SSLContext: ... + +if sys.version_info >= (3, 10): + def _create_unverified_context( + protocol: int | None = None, + *, + cert_reqs: int = ..., + check_hostname: bool = ..., + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = ..., + keyfile: StrOrBytesPath | None = ..., + cafile: StrOrBytesPath | None = ..., + capath: StrOrBytesPath | None = ..., + cadata: str | ReadableBuffer | None = ..., + ) -> SSLContext: ... + +else: + def _create_unverified_context( + protocol: int = ..., + *, + cert_reqs: int = ..., + check_hostname: bool = ..., + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = ..., + keyfile: StrOrBytesPath | None = ..., + cafile: StrOrBytesPath | None = ..., + capath: StrOrBytesPath | None = ..., + cadata: str | ReadableBuffer | None = ..., + ) -> SSLContext: ... 
_create_default_https_context: Callable[..., SSLContext] diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index 450eb8cd24d1..c0b10a7781c3 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import ReadableBuffer, Self, StrOrBytesPath from collections.abc import Callable, Collection, Iterable, Mapping, Sequence from types import TracebackType from typing import IO, Any, AnyStr, Generic, TypeVar, overload @@ -63,13 +63,13 @@ if sys.platform == "win32": # except TimeoutError as e: # reveal_type(e.cmd) # Any, but morally is _CMD _FILE: TypeAlias = None | int | IO[Any] -_TXT: TypeAlias = bytes | str +_InputString: TypeAlias = ReadableBuffer | str if sys.version_info >= (3, 8): _CMD: TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath] else: # Python 3.7 doesn't support _CMD being a single PathLike. # See: https://bugs.python.org/issue31961 - _CMD: TypeAlias = _TXT | Sequence[StrOrBytesPath] + _CMD: TypeAlias = str | bytes | Sequence[StrOrBytesPath] if sys.platform == "win32": _ENV: TypeAlias = Mapping[str, str] else: @@ -118,7 +118,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -152,7 +152,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -186,7 +186,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -255,7 +255,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -266,7 +266,7 @@ if sys.version_info >= (3, 11): check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -289,7 +289,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -300,7 +300,7 @@ if sys.version_info >= (3, 11): check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -326,7 +326,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -359,7 +359,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | 
None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -392,7 +392,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -459,7 +459,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -470,7 +470,7 @@ elif sys.version_info >= (3, 10): check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -492,7 +492,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -503,7 +503,7 @@ elif sys.version_info >= (3, 10): check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -528,7 +528,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -560,7 +560,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -592,7 +592,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -657,7 +657,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -668,7 +668,7 @@ elif sys.version_info >= (3, 9): check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -689,7 +689,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -700,7 +700,7 @@ elif sys.version_info >= (3, 9): check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: 
float | None = ..., user: str | int | None = ..., @@ -723,7 +723,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -751,7 +751,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -779,7 +779,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -836,7 +836,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -847,7 +847,7 @@ else: check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., ) -> CompletedProcess[bytes]: ... @@ -864,7 +864,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -875,7 +875,7 @@ else: check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: float | None = ..., ) -> CompletedProcess[Any]: ... 
@@ -895,7 +895,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -926,7 +926,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -956,7 +956,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -984,7 +984,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1010,7 +1010,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1041,7 +1041,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1071,7 +1071,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1099,7 +1099,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1124,7 +1124,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1132,7 +1132,7 @@ if sys.version_info >= (3, 11): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1155,7 +1155,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1163,7 +1163,7 @@ if sys.version_info >= (3, 11): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1186,7 +1186,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = 
..., restore_signals: bool = ..., @@ -1194,7 +1194,7 @@ if sys.version_info >= (3, 11): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1226,7 +1226,7 @@ if sys.version_info >= (3, 11): pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1249,7 +1249,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1257,7 +1257,7 @@ if sys.version_info >= (3, 11): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., @@ -1280,7 +1280,7 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1288,7 +1288,7 @@ if sys.version_info >= (3, 11): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1298,7 +1298,7 @@ if sys.version_info >= (3, 11): umask: int = ..., pipesize: int = ..., process_group: int | None = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... 
# morally: -> str | bytes elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -1314,7 +1314,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1322,7 +1322,7 @@ elif sys.version_info >= (3, 10): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1344,7 +1344,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1352,7 +1352,7 @@ elif sys.version_info >= (3, 10): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1374,7 +1374,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1382,7 +1382,7 @@ elif sys.version_info >= (3, 10): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1413,7 +1413,7 @@ elif sys.version_info >= (3, 10): pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1435,7 +1435,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1443,7 +1443,7 @@ elif sys.version_info >= (3, 10): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., @@ -1465,7 +1465,7 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1473,7 +1473,7 @@ elif sys.version_info >= (3, 10): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1482,7 +1482,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = ..., umask: int = ..., pipesize: int = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... 
# morally: -> str | bytes elif sys.version_info >= (3, 9): # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @@ -1498,7 +1498,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1506,7 +1506,7 @@ elif sys.version_info >= (3, 9): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1527,7 +1527,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1535,7 +1535,7 @@ elif sys.version_info >= (3, 9): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1556,7 +1556,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1564,7 +1564,7 @@ elif sys.version_info >= (3, 9): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1594,7 +1594,7 @@ elif sys.version_info >= (3, 9): pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1615,7 +1615,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1623,7 +1623,7 @@ elif sys.version_info >= (3, 9): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., @@ -1644,7 +1644,7 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1652,7 +1652,7 @@ elif sys.version_info >= (3, 9): pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1660,7 +1660,7 @@ elif sys.version_info >= (3, 9): group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., umask: int = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... 
# morally: -> str | bytes else: @overload @@ -1675,7 +1675,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1683,7 +1683,7 @@ else: pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1700,7 +1700,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1708,7 +1708,7 @@ else: pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1725,7 +1725,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1733,7 +1733,7 @@ else: pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1759,7 +1759,7 @@ else: pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1776,7 +1776,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1784,7 +1784,7 @@ else: pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., @@ -1801,7 +1801,7 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1809,11 +1809,11 @@ else: pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... # morally: -> str | bytes PIPE: int STDOUT: int @@ -1822,11 +1822,11 @@ DEVNULL: int class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): - def __init__(self, cmd: _CMD, timeout: float, output: _TXT | None = ..., stderr: _TXT | None = ...) -> None: ... + def __init__(self, cmd: _CMD, timeout: float, output: str | bytes | None = ..., stderr: str | bytes | None = ...) -> None: ... 
# morally: _CMD cmd: Any timeout: float - # morally: _TXT | None + # morally: str | bytes | None output: Any stdout: bytes | None stderr: bytes | None @@ -1835,13 +1835,15 @@ class CalledProcessError(SubprocessError): returncode: int # morally: _CMD cmd: Any - # morally: _TXT | None + # morally: str | bytes | None output: Any - # morally: _TXT | None + # morally: str | bytes | None stdout: Any stderr: Any - def __init__(self, returncode: int, cmd: _CMD, output: _TXT | None = ..., stderr: _TXT | None = ...) -> None: ... + def __init__( + self, returncode: int, cmd: _CMD, output: str | bytes | None = ..., stderr: str | bytes | None = ... + ) -> None: ... class Popen(Generic[AnyStr]): args: _CMD @@ -1868,7 +1870,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1899,7 +1901,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1962,7 +1964,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -1993,7 +1995,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2024,7 +2026,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2057,7 +2059,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2087,7 +2089,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2148,7 +2150,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2178,7 +2180,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2208,7 +2210,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., 
startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2240,7 +2242,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2269,7 +2271,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2328,7 +2330,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2357,7 +2359,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2386,7 +2388,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2416,7 +2418,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2441,7 +2443,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2492,7 +2494,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2517,7 +2519,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2542,7 +2544,7 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., @@ -2556,13 +2558,10 @@ class Popen(Generic[AnyStr]): def poll(self) -> int | None: ... def wait(self, timeout: float | None = ...) -> int: ... - # Return str/bytes - def communicate( - self, - input: AnyStr | None = ..., - timeout: float | None = ..., - # morally this should be optional - ) -> tuple[AnyStr, AnyStr]: ... + # morally the members of the returned tuple should be optional + # TODO this should allow ReadableBuffer for Popen[bytes], but adding + # overloads for that runs into a mypy bug (python/mypy#14070). 
+ def communicate(self, input: AnyStr | None = ..., timeout: float | None = ...) -> tuple[AnyStr, AnyStr]: ... def send_signal(self, sig: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... @@ -2575,12 +2574,12 @@ class Popen(Generic[AnyStr]): # The result really is always a str. if sys.version_info >= (3, 11): - def getstatusoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> tuple[int, str]: ... - def getoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> str: ... + def getstatusoutput(cmd: str | bytes, *, encoding: str | None = ..., errors: str | None = ...) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes, *, encoding: str | None = ..., errors: str | None = ...) -> str: ... else: - def getstatusoutput(cmd: _TXT) -> tuple[int, str]: ... - def getoutput(cmd: _TXT) -> str: ... + def getstatusoutput(cmd: str | bytes) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes) -> str: ... if sys.version_info >= (3, 8): def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/sysconfig.pyi b/mypy/typeshed/stdlib/sysconfig.pyi index 895abc2cd047..4b6257b5f62e 100644 --- a/mypy/typeshed/stdlib/sysconfig.pyi +++ b/mypy/typeshed/stdlib/sysconfig.pyi @@ -32,7 +32,13 @@ def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = ..., ex def get_paths(scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> dict[str, str]: ... def get_python_version() -> str: ... def get_platform() -> str: ... -def is_python_build(check_home: bool = ...) -> bool: ... + +if sys.version_info >= (3, 11): + def is_python_build(check_home: object = None) -> bool: ... + +else: + def is_python_build(check_home: bool = False) -> bool: ... + def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = ...) -> dict[str, Any]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... diff --git a/mypy/typeshed/stdlib/tokenize.pyi b/mypy/typeshed/stdlib/tokenize.pyi index 7c00b507a528..ba57402fb845 100644 --- a/mypy/typeshed/stdlib/tokenize.pyi +++ b/mypy/typeshed/stdlib/tokenize.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import StrOrBytesPath +from _typeshed import FileDescriptorOrPath from collections.abc import Callable, Generator, Iterable, Sequence from re import Pattern from token import * @@ -125,7 +125,7 @@ def untokenize(iterable: Iterable[_Token]) -> Any: ... def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented -def open(filename: StrOrBytesPath | int) -> TextIO: ... +def open(filename: FileDescriptorOrPath) -> TextIO: ... def group(*choices: str) -> str: ... # undocumented def any(*choices: str) -> str: ... # undocumented def maybe(*choices: str) -> str: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index efb91a4b34ff..8fe5d8b37ac0 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -158,10 +158,10 @@ _Q = TypeVar("_Q", bound=str | Iterable[int]) def urlencode( query: Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]], - doseq: bool = ..., + doseq: bool = False, safe: _Q = ..., - encoding: str = ..., - errors: str = ..., + encoding: str | None = None, + errors: str | None = None, quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., ) -> str: ... def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = ...) -> AnyStr: ... diff --git a/mypy/typeshed/stdlib/venv/__init__.pyi b/mypy/typeshed/stdlib/venv/__init__.pyi index 2e34aed4c693..dfa0b69b0870 100644 --- a/mypy/typeshed/stdlib/venv/__init__.pyi +++ b/mypy/typeshed/stdlib/venv/__init__.pyi @@ -1,8 +1,11 @@ +import logging import sys from _typeshed import StrOrBytesPath from collections.abc import Sequence from types import SimpleNamespace +logger: logging.Logger + if sys.version_info >= (3, 9): CORE_VENV_DEPS: tuple[str, ...] diff --git a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi index 3e3e3f266206..43b394bd67ec 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import StrOrBytesPath +from _typeshed import FileDescriptorOrPath from collections.abc import Callable from xml.etree.ElementTree import Element @@ -12,7 +12,7 @@ if sys.version_info >= (3, 9): class FatalIncludeError(SyntaxError): ... -def default_loader(href: StrOrBytesPath | int, parse: str, encoding: str | None = ...) -> str | Element: ... +def default_loader(href: FileDescriptorOrPath, parse: str, encoding: str | None = ...) -> str | Element: ... # TODO: loader is of type default_loader ie it takes a callable that has the # same signature as default_loader. 
But default_loader has a keyword argument diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi index c063c1fd3488..2b6191a395c3 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -1,6 +1,6 @@ import sys from _collections_abc import dict_keys -from _typeshed import FileDescriptor, ReadableBuffer, StrOrBytesPath, SupportsRead, SupportsWrite +from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence from typing import Any, TypeVar, overload from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard @@ -38,8 +38,8 @@ if sys.version_info >= (3, 9): __all__ += ["indent"] _T = TypeVar("_T") -_FileRead: TypeAlias = StrOrBytesPath | FileDescriptor | SupportsRead[bytes] | SupportsRead[str] -_FileWriteC14N: TypeAlias = StrOrBytesPath | FileDescriptor | SupportsWrite[bytes] +_FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead[str] +_FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes] _FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str] VERSION: str From e9f5858c44b61c2d02819940debe3c31d099f9d5 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 16 Jan 2023 08:30:42 +0000 Subject: [PATCH 182/292] Run lint in parallel in runtest.py (#14448) Give flake8 3 parallel processes. Previously it was sequential. Assuming that most developers have a machine with at least 4 cores, we have at least one left for self check, which is run in parallel with lint. --- runtests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtests.py b/runtests.py index be4ad4add08a..ade0a8adee5e 100755 --- a/runtests.py +++ b/runtests.py @@ -50,7 +50,7 @@ # Self type check "self": [executable, "-m", "mypy", "--config-file", "mypy_self_check.ini", "-p", "mypy"], # Lint - "lint": ["flake8", "-j0"], + "lint": ["flake8", "-j3"], "format-black": ["black", "."], "format-isort": ["isort", "."], # Fast test cases only (this is the bulk of the test suite) From e88f4a471f31c7b730477266244505fb7715f463 Mon Sep 17 00:00:00 2001 From: Richard Si Date: Mon, 16 Jan 2023 05:24:27 -0500 Subject: [PATCH 183/292] [mypyc] Always emit warnings (#14451) For example the warning for "treating generator comprehension as list" doesn't get printed unless there were errors too. This was due to the fact mypyc/build.py was only checking errors.num_errors to decide whether to print messages to STDOUT. To be honest, the generate_c() logic was pretty messy, so I broke out the message printing logic into a separate helper function and made liberal use of early exits. Fixes https://github.com/mypyc/mypyc/issues/873#issuecomment-871055474. --- mypyc/build.py | 51 +++++++++++++++----------------- mypyc/test-data/commandline.test | 6 ++++ mypyc/test/test_commandline.py | 5 ++++ 3 files changed, 35 insertions(+), 27 deletions(-) diff --git a/mypyc/build.py b/mypyc/build.py index a9aa16f5dfee..cc03eba95b4e 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -85,6 +85,15 @@ def fail(message: str) -> NoReturn: sys.exit(message) +def emit_messages(options: Options, messages: list[str], dt: float, serious: bool = False) -> None: + # ... you know, just in case. 
+ if options.junit_xml: + py_version = f"{options.python_version[0]}_{options.python_version[1]}" + write_junit_xml(dt, serious, messages, options.junit_xml, py_version, options.platform) + if messages: + print("\n".join(messages)) + + def get_mypy_config( mypy_options: list[str], only_compile_paths: Iterable[str] | None, @@ -191,47 +200,35 @@ def generate_c( """ t0 = time.time() - # Do the actual work now - serious = False - result = None try: result = emitmodule.parse_and_typecheck( sources, options, compiler_options, groups, fscache ) - messages = result.errors except CompileError as e: - messages = e.messages - if not e.use_stdout: - serious = True + emit_messages(options, e.messages, time.time() - t0, serious=(not e.use_stdout)) + sys.exit(1) t1 = time.time() + if result.errors: + emit_messages(options, result.errors, t1 - t0) + sys.exit(1) + if compiler_options.verbose: print(f"Parsed and typechecked in {t1 - t0:.3f}s") - if not messages and result: - errors = Errors() - modules, ctext = emitmodule.compile_modules_to_c( - result, compiler_options=compiler_options, errors=errors, groups=groups - ) - - if errors.num_errors: - messages.extend(errors.new_messages()) - + errors = Errors() + modules, ctext = emitmodule.compile_modules_to_c( + result, compiler_options=compiler_options, errors=errors, groups=groups + ) t2 = time.time() + emit_messages(options, errors.new_messages(), t2 - t1) + if errors.num_errors: + # No need to stop the build if only warnings were emitted. + sys.exit(1) + if compiler_options.verbose: print(f"Compiled to C in {t2 - t1:.3f}s") - # ... you know, just in case. - if options.junit_xml: - py_version = f"{options.python_version[0]}_{options.python_version[1]}" - write_junit_xml( - t2 - t0, serious, messages, options.junit_xml, py_version, options.platform - ) - - if messages: - print("\n".join(messages)) - sys.exit(1) - return ctext, "\n".join(format_modules(modules)) diff --git a/mypyc/test-data/commandline.test b/mypyc/test-data/commandline.test index bc2713a20f7d..e7ba11192d28 100644 --- a/mypyc/test-data/commandline.test +++ b/mypyc/test-data/commandline.test @@ -223,3 +223,9 @@ def h(arg: str) -> None: @a.register def i(arg: Foo) -> None: pass + +[case testOnlyWarningOutput] +# cmd: test.py + +[file test.py] +names = (str(v) for v in [1, 2, 3]) # W: Treating generator comprehension as list diff --git a/mypyc/test/test_commandline.py b/mypyc/test/test_commandline.py index aafe1e4adc1b..f66ca2ec8ff0 100644 --- a/mypyc/test/test_commandline.py +++ b/mypyc/test/test_commandline.py @@ -58,6 +58,11 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: ) if "ErrorOutput" in testcase.name or cmd.returncode != 0: out += cmd.stdout + elif "WarningOutput" in testcase.name: + # Strip out setuptools build related output since we're only + # interested in the messages emitted during compilation. + messages, _, _ = cmd.stdout.partition(b"running build_ext") + out += messages if cmd.returncode == 0: # Run main program From f957a39f4338e6532dd7381230d07cf51c72f265 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 16 Jan 2023 11:07:21 +0000 Subject: [PATCH 184/292] Add scripts for running mypy and mypyc tests inside a Docker container (#14454) This speeds up running `pytest mypyc/test/test_run.py` by over 3x on a Mac mini M1. Mypy test performance is fairly similar to running natively. 
Example of how to use these on macOS: ``` $ colima start -c 8 # Start VM $ python3 misc/docker/build.py # Build Ubuntu container with test dependencies $ misc/docker/run.sh pytest mypyc/test/test_run.py # Run tests in container ``` Also add a README that documents how to use these. Closes #14453. --- misc/docker/Dockerfile | 12 +++++ misc/docker/README.md | 101 +++++++++++++++++++++++++++++++++++++ misc/docker/build.py | 46 +++++++++++++++++ misc/docker/run-wrapper.sh | 13 +++++ misc/docker/run.sh | 15 ++++++ 5 files changed, 187 insertions(+) create mode 100644 misc/docker/Dockerfile create mode 100644 misc/docker/README.md create mode 100644 misc/docker/build.py create mode 100755 misc/docker/run-wrapper.sh create mode 100755 misc/docker/run.sh diff --git a/misc/docker/Dockerfile b/misc/docker/Dockerfile new file mode 100644 index 000000000000..3327f9e38815 --- /dev/null +++ b/misc/docker/Dockerfile @@ -0,0 +1,12 @@ +FROM ubuntu:latest + +WORKDIR /mypy + +RUN apt-get update +RUN apt-get install -y python3 python3-pip clang + +COPY mypy-requirements.txt . +COPY test-requirements.txt . +COPY build-requirements.txt . + +RUN pip3 install -r test-requirements.txt diff --git a/misc/docker/README.md b/misc/docker/README.md new file mode 100644 index 000000000000..839f9761cb03 --- /dev/null +++ b/misc/docker/README.md @@ -0,0 +1,101 @@ +Running mypy and mypyc tests in a Docker container +================================================== + +This directory contains scripts for running mypy and mypyc tests in a +Linux Docker container. This allows running Linux tests on a different +operating system that supports Docker, or running tests in an +isolated, predictable environment on a Linux host operating system. + +Why use Docker? +--------------- + +Mypyc tests can be significantly faster in a Docker container than +running natively on macOS. + +Also, if it's inconvient to install the necessary dependencies on the +host operating system, or there are issues getting some tests to pass +on the host operating system, using a container can be an easy +workaround. + +Prerequisites +------------- + +First install Docker. On macOS, both Docker Desktop (proprietary, but +with a free of charge subscription for some use cases) and Colima (MIT +license) should work as runtimes. + +You may have to explicitly start the runtime first. Colima example +(replace '8' with the number of CPU cores you have): + +``` +$ colima start -c 8 + +``` + +How to run tests +---------------- + +You need to build the container with all necessary dependencies before +you can run tests: + +``` +$ python3 misc/docker/build.py +``` + +This creates a `mypy-test` Docker container that you can use to run +tests. + +You may need to run the script as root: + +``` +$ sudo python3 misc/docker/build.py +``` + +If you have a stale container which isn't up-to-date, use `--no-cache` +`--pull` to force rebuilding everything: + +``` +$ python3 misc/docker/build.py --no-cache --pull +``` + +Now you can run tests by using the `misc/docker/run.sh` script. Give +it the pytest command line you want to run as arguments. For example, +you can run mypyc tests like this: + +``` +$ misc/docker/run.sh pytest mypyc +``` + +You can also use `-k `, `-n0`, `-q`, etc. + +Again, you may need to run `run.sh` as root: + +``` +$ sudo misc/docker/run.sh pytest mypyc +``` + +You can also use `runtests.py` in the container. 
Example: + +``` +$ misc/docker/run.sh ./runtests.py self lint +``` + +Notes +----- + +File system changes within the container are not visible to the host +system. You can't use the container to format code using Black, for +example. + +On a mac, you may want to give additional CPU to the VM used to run +the container. The default allocation may be way too low (e.g. 2 CPU +cores). For example, use the `-c` option when starting the VM if you +use Colima: + +``` +$ colima start -c 8 +``` + +Giving access to all available CPUs to the Linux VM tends to provide +the best performance. This is not needed on a Linux host, since the +container is not run in a VM. diff --git a/misc/docker/build.py b/misc/docker/build.py new file mode 100644 index 000000000000..2103be3f110f --- /dev/null +++ b/misc/docker/build.py @@ -0,0 +1,46 @@ +"""Build a "mypy-test" Linux Docker container for running mypy/mypyc tests. + +This allows running Linux tests under a non-Linux operating system. Mypyc +tests can also run much faster under Linux that the host OS. + +NOTE: You may need to run this as root (using sudo). + +Run with "--no-cache" to force reinstallation of mypy dependencies. +Run with "--pull" to force update of the Linux (Ubuntu) base image. + +After you've built the container, use "run.sh" to run tests. Example: + + misc/docker/run.sh pytest mypyc/ +""" + +import argparse +import os +import subprocess +import sys + + +def main() -> None: + parser = argparse.ArgumentParser( + description="""Build a 'mypy-test' Docker container for running mypy/mypyc tests. You may + need to run this as root (using sudo).""" + ) + parser.add_argument("--no-cache", action="store_true", help="Force rebuilding") + parser.add_argument("--pull", action="store_true", help="Force pulling fresh Linux base image") + args = parser.parse_args() + + dockerdir = os.path.dirname(os.path.abspath(__file__)) + dockerfile = os.path.join(dockerdir, "Dockerfile") + rootdir = os.path.join(dockerdir, "..", "..") + + cmdline = ["docker", "build", "-t", "mypy-test", "-f", dockerfile] + if args.no_cache: + cmdline.append("--no-cache") + if args.pull: + cmdline.append("--pull") + cmdline.append(rootdir) + result = subprocess.run(cmdline) + sys.exit(result.returncode) + + +if __name__ == "__main__": + main() diff --git a/misc/docker/run-wrapper.sh b/misc/docker/run-wrapper.sh new file mode 100755 index 000000000000..77e77d99af34 --- /dev/null +++ b/misc/docker/run-wrapper.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Internal wrapper script used to run commands in a container + +# Copy all the files we need from the mypy repo directory shared with +# the host to a local directory. Accessing files using a shared +# directory on a mac can be *very* slow. +echo "copying files to the container..." +cp -R /repo/{mypy,mypyc,test-data,misc} . +cp /repo/{pytest.ini,conftest.py,runtests.py,pyproject.toml,setup.cfg} . +cp /repo/{mypy_self_check.ini,mypy_bootstrap.ini} . + +# Run the wrapped command +"$@" diff --git a/misc/docker/run.sh b/misc/docker/run.sh new file mode 100755 index 000000000000..c8fc0e510e8e --- /dev/null +++ b/misc/docker/run.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# Run mypy or mypyc tests in a Docker container that was built using misc/docker/build.py. +# +# Usage: misc/docker/run.sh ... +# +# For example, run mypyc tests like this: +# +# misc/docker/run.sh pytest mypyc +# +# NOTE: You may need to run this as root (using sudo). + +SCRIPT_DIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd) +MYPY_DIR="$SCRIPT_DIR/../.." 
+ +docker run -ti --rm -v "$MYPY_DIR:/repo" mypy-test /repo/misc/docker/run-wrapper.sh "$@" From f6256025c4af14f606da7aa0a3699c2417647686 Mon Sep 17 00:00:00 2001 From: jhance Date: Tue, 17 Jan 2023 08:17:38 -0800 Subject: [PATCH 185/292] Re-use constraints helper in constraints visitor (#14444) The visit_tuple_type method of the constraints visitor does similar logic to the new helper for building constraints for typevar tuples, so we reimplement it using that. This also fixes a bug that was uncovered in the implementation of the helper. --- mypy/constraints.py | 101 ++++++++++++++++++----------------- mypy/test/testconstraints.py | 8 +++ 2 files changed, 60 insertions(+), 49 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 63e1672eb162..697e793cb11d 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -995,58 +995,53 @@ def infer_against_overloaded( return infer_constraints(template, item, self.direction) def visit_tuple_type(self, template: TupleType) -> list[Constraint]: + actual = self.actual - # TODO: Support subclasses of Tuple + unpack_index = find_unpack_in_list(template.items) is_varlength_tuple = ( isinstance(actual, Instance) and actual.type.fullname == "builtins.tuple" ) - unpack_index = find_unpack_in_list(template.items) - if unpack_index is not None: - unpack_item = get_proper_type(template.items[unpack_index]) - assert isinstance(unpack_item, UnpackType) - - unpacked_type = get_proper_type(unpack_item.type) - if isinstance(unpacked_type, TypeVarTupleType): + if isinstance(actual, TupleType) or is_varlength_tuple: + res: list[Constraint] = [] + if unpack_index is not None: if is_varlength_tuple: - # This case is only valid when the unpack is the only - # item in the tuple. - # - # TODO: We should support this in the case that all the items - # in the tuple besides the unpack have the same type as the - # varlength tuple's type. E.g. Tuple[int, ...] should be valid - # where we expect Tuple[int, Unpack[Ts]], but not for Tuple[str, Unpack[Ts]]. - assert len(template.items) == 1 - - if isinstance(actual, (TupleType, AnyType)) or is_varlength_tuple: - modified_actual = actual - if isinstance(actual, TupleType): - # Exclude the items from before and after the unpack index. - # TODO: Support including constraints from the prefix/suffix. - _, actual_items, _ = split_with_prefix_and_suffix( - tuple(actual.items), - unpack_index, - len(template.items) - unpack_index - 1, - ) - modified_actual = actual.copy_modified(items=list(actual_items)) - return [ - Constraint( - type_var=unpacked_type, op=self.direction, target=modified_actual - ) - ] + unpack_type = template.items[unpack_index] + assert isinstance(unpack_type, UnpackType) + unpacked_type = unpack_type.type + assert isinstance(unpacked_type, TypeVarTupleType) + return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)] + else: + assert isinstance(actual, TupleType) + ( + unpack_constraints, + actual_items, + template_items, + ) = find_and_build_constraints_for_unpack( + tuple(actual.items), tuple(template.items), self.direction + ) + res.extend(unpack_constraints) + elif isinstance(actual, TupleType): + actual_items = tuple(actual.items) + template_items = tuple(template.items) + else: + return res - if isinstance(actual, TupleType) and len(actual.items) == len(template.items): - if ( - actual.partial_fallback.type.is_named_tuple - and template.partial_fallback.type.is_named_tuple - ): - # For named tuples using just the fallbacks usually gives better results. 
- return infer_constraints( - template.partial_fallback, actual.partial_fallback, self.direction - ) - res: list[Constraint] = [] - for i in range(len(template.items)): - res.extend(infer_constraints(template.items[i], actual.items[i], self.direction)) + # Cases above will return if actual wasn't a TupleType. + assert isinstance(actual, TupleType) + if len(actual_items) == len(template_items): + if ( + actual.partial_fallback.type.is_named_tuple + and template.partial_fallback.type.is_named_tuple + ): + # For named tuples using just the fallbacks usually gives better results. + return res + infer_constraints( + template.partial_fallback, actual.partial_fallback, self.direction + ) + for i in range(len(template_items)): + res.extend( + infer_constraints(template_items[i], actual_items[i], self.direction) + ) return res elif isinstance(actual, AnyType): return self.infer_against_any(template.items, actual) @@ -1079,10 +1074,13 @@ def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]: def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> list[Constraint]: res: list[Constraint] = [] for t in types: - # Note that we ignore variance and simply always use the - # original direction. This is because for Any targets direction is - # irrelevant in most cases, see e.g. is_same_constraint(). - res.extend(infer_constraints(t, any_type, self.direction)) + if isinstance(t, UnpackType) and isinstance(t.type, TypeVarTupleType): + res.append(Constraint(t.type, self.direction, any_type)) + else: + # Note that we ignore variance and simply always use the + # original direction. This is because for Any targets direction is + # irrelevant in most cases, see e.g. is_same_constraint(). + res.extend(infer_constraints(t, any_type, self.direction)) return res def visit_overloaded(self, template: Overloaded) -> list[Constraint]: @@ -1187,6 +1185,11 @@ def build_constraints_for_unpack( template_suffix_len: int, direction: int, ) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + if mapped_prefix_len is None: + mapped_prefix_len = template_prefix_len + if mapped_suffix_len is None: + mapped_suffix_len = template_suffix_len + split_result = split_with_mapped_and_template( mapped, mapped_prefix_len, diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index fc6960e0d8a0..b46f31327150 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -151,3 +151,11 @@ def test_unpack_tuple_length_non_match(self) -> None: Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), } + + def test_var_length_tuple_with_fixed_length_tuple(self) -> None: + fx = self.fx + assert not infer_constraints( + TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])), + Instance(fx.std_tuplei, [fx.a]), + SUPERTYPE_OF, + ) From acf26f43e9d26066fdcbe7850bbfab87b98cf8e0 Mon Sep 17 00:00:00 2001 From: Richard Si Date: Wed, 18 Jan 2023 05:14:41 -0500 Subject: [PATCH 186/292] [mypyc] Optimize int()/float()/complex() on native classes (#14450) int() and float() calls on native classes can simply call the associated dunder if the RInstance defines it, no need to load the type and call it. bool() calls were already optimized merely a few days ago, but there wasn't an IRbuild test verifying this so I added one. --- Follow up to https://github.com/python/mypy/pull/14422. 
I saw the PR and it reminded me that I had this old patch laying around :) --- mypy/nodes.py | 2 +- mypyc/irbuild/specialize.py | 16 ++++++++---- mypyc/test-data/irbuild-dunders.test | 38 +++++++++++++++++++++------- 3 files changed, 41 insertions(+), 15 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 85bb9ce4a8de..4a4de9d4503d 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1777,7 +1777,7 @@ class NameExpr(RefExpr): def __init__(self, name: str) -> None: super().__init__() - self.name = name # Name referred to (may be qualified) + self.name = name # Name referred to # Is this a l.h.s. of a special form assignment like typed dict or type variable? self.is_special_form = False diff --git a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py index 06babd2f7e1a..e62350778f54 100644 --- a/mypyc/irbuild/specialize.py +++ b/mypyc/irbuild/specialize.py @@ -157,14 +157,20 @@ def translate_globals(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Va @specialize_function("builtins.abs") -def translate_abs(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: - """Specialize calls on native classes that implement __abs__.""" - if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS]: +@specialize_function("builtins.int") +@specialize_function("builtins.float") +@specialize_function("builtins.complex") +def translate_builtins_with_unary_dunder( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Specialize calls on native classes that implement the associated dunder.""" + if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS] and isinstance(callee, NameExpr): arg = expr.args[0] arg_typ = builder.node_type(arg) - if isinstance(arg_typ, RInstance) and arg_typ.class_ir.has_method("__abs__"): + method = f"__{callee.name}__" + if isinstance(arg_typ, RInstance) and arg_typ.class_ir.has_method(method): obj = builder.accept(arg) - return builder.gen_method_call(obj, "__abs__", [], None, expr.line) + return builder.gen_method_call(obj, method, [], None, expr.line) return None diff --git a/mypyc/test-data/irbuild-dunders.test b/mypyc/test-data/irbuild-dunders.test index 24e708913354..82f04dcdf687 100644 --- a/mypyc/test-data/irbuild-dunders.test +++ b/mypyc/test-data/irbuild-dunders.test @@ -154,6 +154,12 @@ class C: def __abs__(self) -> int: return 6 + def __bool__(self) -> bool: + return False + + def __complex__(self) -> complex: + return 7j + def f(c: C) -> None: -c ~c @@ -161,6 +167,8 @@ def f(c: C) -> None: float(c) +c abs(c) + bool(c) + complex(c) [out] def C.__neg__(self): self :: __main__.C @@ -188,19 +196,31 @@ def C.__abs__(self): self :: __main__.C L0: return 12 +def C.__bool__(self): + self :: __main__.C +L0: + return 0 +def C.__complex__(self): + self :: __main__.C + r0 :: object +L0: + r0 = 7j + return r0 def f(c): c :: __main__.C - r0, r1 :: int - r2, r3, r4, r5 :: object - r6, r7 :: int + r0, r1, r2 :: int + r3 :: float + r4, r5 :: int + r6 :: bool + r7 :: object L0: r0 = c.__neg__() r1 = c.__invert__() - r2 = load_address PyLong_Type - r3 = PyObject_CallFunctionObjArgs(r2, c, 0) - r4 = load_address PyFloat_Type - r5 = PyObject_CallFunctionObjArgs(r4, c, 0) - r6 = c.__pos__() - r7 = c.__abs__() + r2 = c.__int__() + r3 = c.__float__() + r4 = c.__pos__() + r5 = c.__abs__() + r6 = c.__bool__() + r7 = c.__complex__() return 1 From 914901f14e0e6223077a8433388c367138717451 Mon Sep 17 00:00:00 2001 From: jhance Date: Wed, 18 Jan 2023 02:19:42 -0800 Subject: [PATCH 187/292] Improve CallableType handling with typevar tuples (#14465) 
Mainly adds some tests that were previously disabled since they failed, as well as tests for handling prefixes. Suffix tests are not added yet because when a suffix is involved we can't use the existing solution since it will construct positional args after *args, when we really need something like `*tuple[*args, ]` in order to encode this in the type info. We also refactor out the manipulation of the arg names/kinds/types into its own helper, which will make control flow less annoying to deal with. We are deferring the implementation of suffixes until more parts of the PEP are implemented since it doesn't seem as important (e.g. in other parts of the language this is not supported). --- mypy/expandtype.py | 168 ++++++++++++------------ test-data/unit/check-typevar-tuple.test | 43 +++++- 2 files changed, 126 insertions(+), 85 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 1c3553fe5e53..203c71b4e824 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -3,7 +3,7 @@ from typing import Iterable, Mapping, Sequence, TypeVar, cast, overload from typing_extensions import Final -from mypy.nodes import ARG_POS, ARG_STAR, Var +from mypy.nodes import ARG_POS, ARG_STAR, ArgKind, Var from mypy.type_visitor import TypeTranslator from mypy.types import ( ANY_STRATEGY, @@ -258,6 +258,90 @@ def expand_unpack(self, t: UnpackType) -> list[Type] | Instance | AnyType | None def visit_parameters(self, t: Parameters) -> Type: return t.copy_modified(arg_types=self.expand_types(t.arg_types)) + def interpolate_args_for_unpack( + self, t: CallableType, var_arg: UnpackType + ) -> tuple[list[str | None], list[ArgKind], list[Type]]: + star_index = t.arg_kinds.index(ARG_STAR) + + # We have something like Unpack[Tuple[X1, X2, Unpack[Ts], Y1, Y2]] + if isinstance(get_proper_type(var_arg.type), TupleType): + expanded_tuple = get_proper_type(var_arg.type.accept(self)) + # TODO: handle the case that expanded_tuple is a variable length tuple.
+ assert isinstance(expanded_tuple, TupleType) + expanded_items = expanded_tuple.items + else: + expanded_items_res = self.expand_unpack(var_arg) + if isinstance(expanded_items_res, list): + expanded_items = expanded_items_res + elif ( + isinstance(expanded_items_res, Instance) + and expanded_items_res.type.fullname == "builtins.tuple" + ): + # TODO: We shouldnt't simply treat this as a *arg because of suffix handling + # (there cannot be positional args after a *arg) + arg_types = ( + t.arg_types[:star_index] + + [expanded_items_res.args[0]] + + t.arg_types[star_index + 1 :] + ) + return (t.arg_names, t.arg_kinds, arg_types) + else: + return (t.arg_names, t.arg_kinds, t.arg_types) + + expanded_unpack_index = find_unpack_in_list(expanded_items) + # This is the case where we just have Unpack[Tuple[X1, X2, X3]] + # (for example if either the tuple had no unpacks, or the unpack in the + # tuple got fully expanded to something with fixed length) + if expanded_unpack_index is None: + arg_names = ( + t.arg_names[:star_index] + + [None] * len(expanded_items) + + t.arg_names[star_index + 1 :] + ) + arg_kinds = ( + t.arg_kinds[:star_index] + + [ARG_POS] * len(expanded_items) + + t.arg_kinds[star_index + 1 :] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + else: + # If Unpack[Ts] simplest form still has an unpack or is a + # homogenous tuple, then only the prefix can be represented as + # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] + # as the star arg, for example. + expanded_unpack = get_proper_type(expanded_items[expanded_unpack_index]) + assert isinstance(expanded_unpack, UnpackType) + + # Extract the typevartuple so we can get a tuple fallback from it. + expanded_unpacked_tvt = get_proper_type(expanded_unpack.type) + assert isinstance(expanded_unpacked_tvt, TypeVarTupleType) + + prefix_len = expanded_unpack_index + arg_names = t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] + arg_kinds = ( + t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items[:prefix_len] + # Constructing the Unpack containing the tuple without the prefix. + + [ + UnpackType( + TupleType( + expanded_items[prefix_len:], expanded_unpacked_tvt.tuple_fallback + ) + ) + if len(expanded_items) - prefix_len > 1 + else expanded_items[0] + ] + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + return (arg_names, arg_kinds, arg_types) + def visit_callable_type(self, t: CallableType) -> Type: param_spec = t.param_spec() if param_spec is not None: @@ -285,89 +369,11 @@ def visit_callable_type(self, t: CallableType) -> Type: var_arg = t.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): - star_index = t.arg_kinds.index(ARG_STAR) - - # We have something like Unpack[Tuple[X1, X2, Unpack[Ts], Y1, Y2]] - if isinstance(get_proper_type(var_arg.typ.type), TupleType): - expanded_tuple = get_proper_type(var_arg.typ.type.accept(self)) - # TODO: handle the case that expanded_tuple is a variable length tuple. - assert isinstance(expanded_tuple, TupleType) - expanded_items = expanded_tuple.items - else: - expanded_items_res = self.expand_unpack(var_arg.typ) - # TODO: can it be anything except a list? - assert isinstance(expanded_items_res, list) - expanded_items = expanded_items_res - - """ - # In this case we keep the arg as ARG_STAR. 
- arg_names = t.arg_names - arg_kinds = t.arg_kinds - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded - + self.expand_types(t.arg_types[star_index + 1 :]) - ) - """ - - expanded_unpack_index = find_unpack_in_list(expanded_items) - # This is the case where we just have Unpack[Tuple[X1, X2, X3]] - # (for example if either the tuple had no unpacks, or the unpack in the - # tuple got fully expanded to something with fixed length) - if expanded_unpack_index is None: - arg_names = ( - t.arg_names[:star_index] - + [None] * len(expanded_items) - + t.arg_names[star_index + 1 :] - ) - arg_kinds = ( - t.arg_kinds[:star_index] - + [ARG_POS] * len(expanded_items) - + t.arg_kinds[star_index + 1 :] - ) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded_items - + self.expand_types(t.arg_types[star_index + 1 :]) - ) - else: - # If Unpack[Ts] simplest form still has an unpack or is a - # homogenous tuple, then only the prefix can be represented as - # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] - # as the star arg, for example. - expanded_unpack = get_proper_type(expanded_items[expanded_unpack_index]) - assert isinstance(expanded_unpack, UnpackType) - - # Extract the typevartuple so we can get a tuple fallback from it. - expanded_unpacked_tvt = get_proper_type(expanded_unpack.type) - assert isinstance(expanded_unpacked_tvt, TypeVarTupleType) - - prefix_len = expanded_unpack_index - arg_names = ( - t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] - ) - arg_kinds = ( - t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:] - ) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded_items[:prefix_len] - # Constructing the Unpack containing the tuple without the prefix. - + [ - UnpackType( - TupleType( - expanded_items[prefix_len:], expanded_unpacked_tvt.tuple_fallback - ) - ) - if len(expanded_items) - prefix_len > 1 - else expanded_items[0] - ] - + self.expand_types(t.arg_types[star_index + 1 :]) - ) + arg_names, arg_kinds, arg_types = self.interpolate_args_for_unpack(t, var_arg.typ) else: - arg_types = self.expand_types(t.arg_types) arg_names = t.arg_names arg_kinds = t.arg_kinds + arg_types = self.expand_types(t.arg_types) return t.copy_modified( arg_types=arg_types, diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index f61b53dcd2c0..9afe709ed19b 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -466,14 +466,49 @@ from typing_extensions import Unpack, TypeVarTuple Ts = TypeVarTuple("Ts") def call( - target: Callable[[ Unpack[Ts]], None], + target: Callable[[Unpack[Ts]], None], args: Tuple[Unpack[Ts]], ) -> None: pass def func(arg1: int, arg2: str) -> None: ... +def func2(arg1: int, arg2: int) -> None: ... +def func3(*args: int) -> None: ... + +vargs: Tuple[int, ...] +vargs_str: Tuple[str, ...] 
+ +call(target=func, args=(0, 'foo')) +call(target=func, args=('bar', 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[object, str], None]" +call(target=func, args=(True, 'foo', 0)) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func, args=(0, 0, 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" + +# NOTE: This behavior may be a bit contentious, it is maybe inconsistent with our handling of +# PEP646 but consistent with our handling of callable constraints. +call(target=func2, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, int], None]"; expected "Callable[[VarArg(int)], None]" +call(target=func3, args=vargs) +call(target=func3, args=(0,1)) +call(target=func3, args=(0,'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func3, args=vargs_str) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646CallableWithPrefixSuffix] +from typing import Tuple, Callable +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") -call(target=func, args=(0, 'foo')) # Valid -#call(target=func, args=(True, 'foo', 0)) # Error -#call(target=func, args=(0, 0, 'foo')) # Error +def call_prefix( + target: Callable[[bytes, Unpack[Ts]], None], + args: Tuple[Unpack[Ts]], +) -> None: + pass + +def func_prefix(arg0: bytes, arg1: int, arg2: str) -> None: ... +def func2_prefix(arg0: str, arg1: int, arg2: str) -> None: ... + +call_prefix(target=func_prefix, args=(0, 'foo')) +call_prefix(target=func2_prefix, args=(0, 'foo')) # E: Argument "target" to "call_prefix" has incompatible type "Callable[[str, int, str], None]"; expected "Callable[[bytes, int, str], None]" [builtins fixtures/tuple.pyi] + From 83660d0ad72ab5a61cea5fe5955e66c33ef54111 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Fri, 20 Jan 2023 03:10:01 -0800 Subject: [PATCH 188/292] Enable use-before-def error code by default (#14166) This enables the error code added in #14163. 
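For reference, a minimal example (adapted from the updated test expectations below) of the kind of code this check now flags by default:

```python
y = x  # error: Name "x" is used before definition  [used-before-def]
x = 0
```

Code that needs the old behaviour can suppress the error with `# type: ignore[used-before-def]` or with `--disable-error-code=used-before-def`, as several of the updated test cases do.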
--- mypy/errorcodes.py | 5 +- test-data/unit/check-abstract.test | 8 +- test-data/unit/check-basic.test | 79 ++----- test-data/unit/check-classes.test | 161 +++++-------- test-data/unit/check-columns.test | 2 +- test-data/unit/check-dataclasses.test | 4 +- test-data/unit/check-dynamic-typing.test | 69 +++--- test-data/unit/check-enum.test | 2 +- test-data/unit/check-errorcodes.test | 6 +- test-data/unit/check-expressions.test | 74 +++--- test-data/unit/check-flags.test | 6 +- test-data/unit/check-functions.test | 26 +- test-data/unit/check-generics.test | 31 +-- test-data/unit/check-incremental.test | 10 +- test-data/unit/check-inference-context.test | 170 +++++++------ test-data/unit/check-inference.test | 146 ++++++------ test-data/unit/check-kwargs.test | 94 ++++---- test-data/unit/check-namedtuple.test | 4 +- test-data/unit/check-newsemanal.test | 249 ++++++++++---------- test-data/unit/check-overloading.test | 37 +-- test-data/unit/check-protocols.test | 2 +- test-data/unit/check-python39.test | 2 +- test-data/unit/check-recursive-types.test | 9 + test-data/unit/check-selftype.test | 8 +- test-data/unit/check-statements.test | 61 ++--- test-data/unit/check-tuples.test | 32 ++- test-data/unit/check-type-aliases.test | 7 +- test-data/unit/check-typeddict.test | 4 +- test-data/unit/check-varargs.test | 223 ++++++++---------- test-data/unit/fine-grained.test | 55 +++-- test-data/unit/semanal-basic.test | 15 +- test-data/unit/semanal-statements.test | 35 ++- test-data/unit/typexport-basic.test | 168 +++++++------ 33 files changed, 850 insertions(+), 954 deletions(-) diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 6b266cc7b429..5696763ec9d1 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -193,10 +193,7 @@ def __str__(self) -> str: default_enabled=False, ) USED_BEFORE_DEF: Final[ErrorCode] = ErrorCode( - "used-before-def", - "Warn about variables that are used before they are defined", - "General", - default_enabled=False, + "used-before-def", "Warn about variables that are used before they are defined", "General" ) diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index f67d9859397e..98be314b9c27 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -102,16 +102,16 @@ class B(A, I): pass from abc import abstractmethod, ABCMeta +class I(metaclass=ABCMeta): + @abstractmethod + def f(self): pass + o = None # type: object t = None # type: type o = I t = I -class I(metaclass=ABCMeta): - @abstractmethod - def f(self): pass - [case testAbstractClassInCasts] from typing import cast from abc import abstractmethod, ABCMeta diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test index a4056c8cb576..c16b9e40122d 100644 --- a/test-data/unit/check-basic.test +++ b/test-data/unit/check-basic.test @@ -12,25 +12,26 @@ class A: pass class B: pass [case testConstructionAndAssignment] -x = None # type: A -x = A() -if int(): - x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: def __init__(self): pass class B: def __init__(self): pass +x = None # type: A +x = A() +if int(): + x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testInheritInitFromObject] +class A(object): pass +class B(object): pass x = None # type: A if int(): x = A() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A(object): pass -class B(object): pass - [case 
testImplicitInheritInitFromObject] +class A: pass +class B: pass x = None # type: A o = None # type: object if int(): @@ -39,10 +40,6 @@ if int(): x = A() if int(): o = x -class A: pass -class B: pass -[out] - [case testTooManyConstructorArgs] import typing object(object()) @@ -51,21 +48,15 @@ main:2: error: Too many arguments for "object" [case testVarDefWithInit] import typing -a = A() # type: A -b = object() # type: A class A: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") - +a = A() # type: A +b = object() # type: A # E: Incompatible types in assignment (expression has type "object", variable has type "A") [case testInheritanceBasedSubtyping] import typing -x = B() # type: A -y = A() # type: B # Fail class A: pass class B(A): pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") - +x = B() # type: A +y = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testDeclaredVariableInParentheses] (x) = None # type: int @@ -101,32 +92,22 @@ w = 1 # E: Incompatible types in assignment (expression has type "int", variabl [case testFunction] import typing -def f(x: 'A') -> None: pass -f(A()) -f(B()) # Fail class A: pass class B: pass -[out] -main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A" - +def f(x: 'A') -> None: pass +f(A()) +f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" [case testNotCallable] import typing -A()() class A: pass -[out] -main:2: error: "A" not callable - +A()() # E: "A" not callable [case testSubtypeArgument] import typing -def f(x: 'A', y: 'B') -> None: pass -f(B(), A()) # Fail -f(B(), B()) - class A: pass class B(A): pass -[out] -main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B" - +def f(x: 'A', y: 'B') -> None: pass +f(B(), A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B" +f(B(), B()) [case testInvalidArgumentCount] import typing def f(x, y) -> None: pass @@ -194,12 +175,10 @@ main:4: error: Incompatible types in assignment (expression has type "B", variab [case testVariableInitializationWithSubtype] import typing -x = B() # type: A -y = A() # type: B # Fail class A: pass class B(A): pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +x = B() # type: A +y = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") -- Misc @@ -217,15 +196,11 @@ main:3: error: Incompatible return value type (got "B", expected "A") [case testTopLevelContextAndInvalidReturn] import typing -def f() -> 'A': - return B() -a = B() # type: A class A: pass class B: pass -[out] -main:3: error: Incompatible return value type (got "B", expected "A") -main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") - +def f() -> 'A': + return B() # E: Incompatible return value type (got "B", expected "A") +a = B() # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testEmptyReturnInAnyTypedFunction] from typing import Any def f() -> Any: @@ -252,6 +227,8 @@ reveal_type(__annotations__) # N: Revealed type is "builtins.dict[builtins.str, [case testLocalVariableShadowing] +class A: pass +class B: pass a = None # type: A if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") @@ -263,10 +240,6 @@ def f() -> None: a = 
B() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = A() - -class A: pass -class B: pass - [case testGlobalDefinedInBlockWithType] class A: pass while A: diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index b35b2f9e4e94..fce1aa1768f9 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -3,64 +3,56 @@ [case testMethodCall] +class A: + def foo(self, x: 'A') -> None: pass +class B: + def bar(self, x: 'B', y: A) -> None: pass a = None # type: A b = None # type: B -a.foo(B()) # Fail -a.bar(B(), A()) # Fail +a.foo(B()) # E: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A" +a.bar(B(), A()) # E: "A" has no attribute "bar" a.foo(A()) b.bar(B(), A()) +[case testMethodCallWithSubtype] class A: def foo(self, x: 'A') -> None: pass -class B: - def bar(self, x: 'B', y: A) -> None: pass -[out] -main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A" -main:6: error: "A" has no attribute "bar" - -[case testMethodCallWithSubtype] + def bar(self, x: 'B') -> None: pass +class B(A): pass a = None # type: A a.foo(A()) a.foo(B()) -a.bar(A()) # Fail +a.bar(A()) # E: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B" a.bar(B()) +[case testInheritingMethod] class A: - def foo(self, x: 'A') -> None: pass - def bar(self, x: 'B') -> None: pass + def foo(self, x: 'B') -> None: pass class B(A): pass -[out] -main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B" - -[case testInheritingMethod] a = None # type: B a.foo(A()) # Fail a.foo(B()) -class A: - def foo(self, x: 'B') -> None: pass -class B(A): pass -[targets __main__, __main__, __main__.A.foo] +[targets __main__, __main__.A.foo] [out] -main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B" +main:6: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B" [case testMethodCallWithInvalidNumberOfArguments] +class A: + def foo(self, x: 'A') -> None: pass a = None # type: A a.foo() # Fail a.foo(object(), A()) # Fail - -class A: - def foo(self, x: 'A') -> None: pass [out] -main:3: error: Missing positional argument "x" in call to "foo" of "A" -main:4: error: Too many arguments for "foo" of "A" -main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" +main:5: error: Missing positional argument "x" in call to "foo" of "A" +main:6: error: Too many arguments for "foo" of "A" +main:6: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" [case testMethodBody] import typing @@ -216,13 +208,11 @@ main:11: error: "B" has no attribute "a" [case testExplicitAttributeInBody] -a = None # type: A -a.x = object() # Fail -a.x = A() class A: x = None # type: A -[out] -main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") +a = None # type: A +a.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") +a.x = A() [case testAttributeDefinedInNonInitMethod] import typing @@ -629,64 +619,50 @@ class B(A): [case testTrivialConstructor] -import typing -a = A() # type: A -b = A() # type: B # Fail class A: def __init__(self) -> None: pass -class B: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +a = A() # type: A +b = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") +class B: pass [case 
testConstructor] -import typing -a = A(B()) # type: A -aa = A(object()) # type: A # Fail -b = A(B()) # type: B # Fail class A: def __init__(self, x: 'B') -> None: pass class B: pass -[out] -main:3: error: Argument 1 to "A" has incompatible type "object"; expected "B" -main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") -[case testConstructorWithTwoArguments] -import typing -a = A(C(), B()) # type: A # Fail +a = A(B()) # type: A +aa = A(object()) # type: A # E: Argument 1 to "A" has incompatible type "object"; expected "B" +b = A(B()) # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") +[case testConstructorWithTwoArguments] class A: def __init__(self, x: 'B', y: 'C') -> None: pass class B: pass class C(B): pass -[out] -main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C" + +a = A(C(), B()) # type: A # E: Argument 2 to "A" has incompatible type "B"; expected "C" [case testInheritedConstructor] -import typing -b = B(C()) # type: B -a = B(D()) # type: A # Fail -class A: - def __init__(self, x: 'C') -> None: pass class B(A): pass class C: pass class D: pass -[out] -main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C" + +b = B(C()) # type: B +a = B(D()) # type: A # E: Argument 1 to "B" has incompatible type "D"; expected "C" +class A: + def __init__(self, x: 'C') -> None: pass [case testOverridingWithIncompatibleConstructor] -import typing -A() # Fail -B(C()) # Fail -A(C()) -B() class A: def __init__(self, x: 'C') -> None: pass class B(A): def __init__(self) -> None: pass class C: pass -[out] -main:2: error: Missing positional argument "x" in call to "A" -main:3: error: Too many arguments for "B" + +A() # E: Missing positional argument "x" in call to "A" +B(C()) # E: Too many arguments for "B" +A(C()) +B() [case testConstructorWithReturnValueType] import typing @@ -826,15 +802,12 @@ class Foo: pass [case testGlobalFunctionInitWithReturnType] -import typing -a = __init__() # type: A -b = __init__() # type: B # Fail -def __init__() -> 'A': pass class A: pass class B: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +def __init__() -> 'A': pass +a = __init__() # type: A +b = __init__() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAccessingInit] from typing import Any, cast class A: @@ -844,7 +817,12 @@ a.__init__(a) # E: Accessing "__init__" on an instance is unsound, since instan (cast(Any, a)).__init__(a) [case testDeepInheritanceHierarchy] -import typing +class A: pass +class B(A): pass +class C(B): pass +class D(C): pass +class D2(C): pass + d = C() # type: D # E: Incompatible types in assignment (expression has type "C", variable has type "D") if int(): d = B() # E: Incompatible types in assignment (expression has type "B", variable has type "D") @@ -859,12 +837,6 @@ b = D() # type: B if int(): b = D2() -class A: pass -class B(A): pass -class C(B): pass -class D(C): pass -class D2(C): pass - [case testConstructorJoinsWithCustomMetaclass] # flags: --strict-optional from typing import TypeVar @@ -1030,7 +1002,7 @@ A.B = None # E: Cannot assign to a type [targets __main__] [case testAccessingClassAttributeWithTypeInferenceIssue] -x = C.x # E: Cannot determine type of "x" +x = C.x # E: Cannot determine type of "x" # E: Name "C" is used before definition def f() -> int: return 1 class C: x = f() @@ -1209,13 +1181,9 @@ class A: [case 
testMultipleClassDefinition] -import typing -A() -class A: pass class A: pass -[out] -main:4: error: Name "A" already defined on line 3 - +class A: pass # E: Name "A" already defined on line 1 +A() [case testDocstringInClass] import typing class A: @@ -2351,7 +2319,7 @@ reveal_type(Fraction() + Fraction()) # N: Revealed type is "builtins.str" [case testReverseOperatorTypeVar2b] from typing import TypeVar -T = TypeVar("T", Real, Fraction) +T = TypeVar("T", "Real", "Fraction") class Real: def __add__(self, other: Fraction) -> str: ... class Fraction(Real): @@ -2955,7 +2923,11 @@ c.__setattr__("x", 42, p=True) [case testCallableObject] -import typing +class A: + def __call__(self, x: 'A') -> 'A': + pass +class B: pass + a = A() b = B() @@ -2968,11 +2940,6 @@ if int(): if int(): b = a(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") -class A: - def __call__(self, x: A) -> A: - pass -class B: pass - -- __new__ -- -------- @@ -4933,7 +4900,7 @@ reveal_type(x.frob) # N: Revealed type is "def (foos: builtins.dict[Any, __main_ [case testNewTypeFromForwardNamedTuple] from typing import NewType, NamedTuple, Tuple -NT = NewType('NT', N) +NT = NewType('NT', 'N') class N(NamedTuple): x: int @@ -4947,7 +4914,7 @@ x = NT(N(1)) from typing import NewType, Tuple from mypy_extensions import TypedDict -NT = NewType('NT', N) # E: Argument 2 to NewType(...) must be subclassable (got "N") +NT = NewType('NT', 'N') # E: Argument 2 to NewType(...) must be subclassable (got "N") class N(TypedDict): x: int [builtins fixtures/dict.pyi] @@ -5060,7 +5027,7 @@ def foo(node: Node) -> Node: [case testForwardReferencesInNewTypeMRORecomputed] from typing import NewType x: Foo -Foo = NewType('Foo', B) +Foo = NewType('Foo', 'B') class A: x: int class B(A): @@ -5445,7 +5412,7 @@ class F: [case testCorrectEnclosingClassPushedInDeferred2] from typing import TypeVar -T = TypeVar('T', bound=C) +T = TypeVar('T', bound='C') class C: def m(self: T) -> T: class Inner: @@ -7059,7 +7026,7 @@ reveal_type(C.__new__) # N: Revealed type is "def (cls: Type[__main__.C]) -> An [case testOverrideGenericSelfClassMethod] from typing import Generic, TypeVar, Type, List -T = TypeVar('T', bound=A) +T = TypeVar('T', bound='A') class A: @classmethod diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test index 7c4681c7a709..6748646b65aa 100644 --- a/test-data/unit/check-columns.test +++ b/test-data/unit/check-columns.test @@ -211,7 +211,7 @@ y: Dict[int, int] = { [builtins fixtures/dict.pyi] [case testColumnCannotDetermineType] -(x) # E:2: Cannot determine type of "x" +(x) # E:2: Cannot determine type of "x" # E:2: Name "x" is used before definition x = None [case testColumnInvalidIndexing] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index c248f8db8585..631a92f9963b 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1929,9 +1929,9 @@ reveal_type(D) # N: Revealed type is "def (x: builtins.list[b.C]) -> a.D" [file b.py] from typing import List import a -B = List[C] -class C(CC): ... class CC: ... +class C(CC): ... 
+B = List[C] [builtins fixtures/dataclasses.pyi] [case testDataclassSelfType] diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test index 7b016c342e95..7e62c0d0b0e8 100644 --- a/test-data/unit/check-dynamic-typing.test +++ b/test-data/unit/check-dynamic-typing.test @@ -47,6 +47,10 @@ class B: pass [case testCallingFunctionWithDynamicArgumentTypes] from typing import Any + +def f(x: Any) -> 'A': + pass + a, b = None, None # type: (A, B) if int(): @@ -61,15 +65,16 @@ if int(): if int(): a = f(f) -def f(x: Any) -> 'A': - pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testCallingWithDynamicReturnType] from typing import Any + +def f(x: 'A') -> Any: + pass + a, b = None, None # type: (A, B) a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" @@ -77,9 +82,6 @@ a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" a = f(a) b = f(a) -def f(x: 'A') -> Any: - pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] @@ -283,6 +285,8 @@ class A: pass from typing import Any, cast class A: pass class B: pass +def f() -> None: pass + d = None # type: Any a = None # type: A b = None # type: B @@ -294,10 +298,15 @@ if int(): b = cast(Any, d) if int(): a = cast(Any, f()) -def f() -> None: pass - [case testCompatibilityOfDynamicWithOtherTypes] from typing import Any, Tuple + +def g(a: 'A') -> None: + pass + +class A: pass +class B: pass + d = None # type: Any t = None # type: Tuple[A, A] # TODO: callable types, overloaded functions @@ -308,12 +317,6 @@ d = g d = A t = d f = d - -def g(a: 'A') -> None: - pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] @@ -362,6 +365,8 @@ a = None # type: A g = None # type: Callable[[], None] h = None # type: Callable[[A], None] +def f(x): pass + f() # E: Missing positional argument "x" in call to "f" f(x, x) # E: Too many arguments for "f" if int(): @@ -373,8 +378,6 @@ if int(): if int(): h = f -def f(x): pass - class A: pass [case testImplicitGlobalFunctionSignatureWithDifferentArgCounts] @@ -384,6 +387,9 @@ g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] a = None # type: A +def f0(): pass +def f2(x, y): pass + if int(): g1 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A], None]") if int(): @@ -400,16 +406,18 @@ if int(): f0() f2(a, a) -def f0(): pass - -def f2(x, y): pass - class A: pass [case testImplicitGlobalFunctionSignatureWithDefaultArgs] from typing import Callable +class A: pass +class B: pass + a, b = None, None # type: (A, B) +def f01(x = b): pass +def f13(x, y = b, z = b): pass + g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] @@ -443,11 +451,6 @@ if int(): if int(): g3 = f13 -def f01(x = b): pass -def f13(x, y = b, z = b): pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] [case testSkipTypeCheckingWithImplicitSignature] @@ -550,6 +553,10 @@ f(o, o, o) [case testInitMethodWithImplicitSignature] from typing import Callable + +class A: + def __init__(self, a, b): pass + f1 = None # type: Callable[[A], A] f2 = None # type: Callable[[A, A], A] a = None # type: A @@ -562,20 +569,14 @@ A(a, a) if int(): f2 = A -class A: - def __init__(self, a, b): pass - [case testUsingImplicitTypeObjectWithIs] - -t = None # type: type -t = A -t = B - class A: pass class B: def __init__(self): pass - +t = None # type: type +t = A +t = B -- Type compatibility -- 
------------------ diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index db8643455099..9343e8d5c562 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -549,7 +549,7 @@ reveal_type(list(Color)) # N: Revealed type is "builtins.list[__main__.Color]" [case testEnumWorkWithForward] from enum import Enum -a: E = E.x +a: E = E.x # type: ignore[used-before-def] class E(Enum): x = 1 y = 2 diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 798c52629a35..d966eb44b6e3 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -77,8 +77,8 @@ a = 'x'.foobar(b) # type: ignore[xyz, w, attr-defined] # E: Name "b" is not de # N: Error code "name-defined" not covered by "type: ignore" comment [case testErrorCodeIgnoreMultiple2] -a = 'x'.foobar(b) # type: int # type: ignore[name-defined, attr-defined] -b = 'x'.foobar(b) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] \ +a = 'x'.foobar(c) # type: int # type: ignore[name-defined, attr-defined] +b = 'x'.foobar(c) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] \ # N: Error code "attr-defined" not covered by "type: ignore" comment [case testErrorCodeWarnUnusedIgnores1] @@ -477,7 +477,7 @@ a['other_commonpart'] = 3 # type: ignore[typeddict-item] [typing fixtures/typing-typeddict.pyi] [case testErrorCodeCannotDetermineType] -y = x # E: Cannot determine type of "x" [has-type] +y = x # E: Cannot determine type of "x" [has-type] # E: Name "x" is used before definition [used-before-def] reveal_type(y) # N: Revealed type is "Any" x = None diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 6b42141b2b15..78ef78e9ad98 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -692,6 +692,7 @@ tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str" [case testDivmod] +# flags: --disable-error-code=used-before-def from typing import Tuple, Union, SupportsInt _Decimal = Union[Decimal, int] class Decimal(SupportsInt): @@ -991,6 +992,15 @@ assert_type(reduce_it(True), Scalar) [case testNoneReturnTypeBasics] +def f() -> None: + pass + +class A: + def g(self, x: object) -> None: + pass + def __call__(self) -> None: + pass + a, o = None, None # type: (A, object) if int(): a = f() # E: "f" does not return a value @@ -1004,40 +1014,30 @@ A().g(f()) # E: "f" does not return a value x: A = f() # E: "f" does not return a value f() A().g(a) - -def f() -> None: - pass - -class A: - def g(self, x: object) -> None: - pass - def __call__(self) -> None: - pass [builtins fixtures/tuple.pyi] [case testNoneReturnTypeWithStatements] import typing -if f(): # Fail +def f() -> None: pass + +if f(): # E: "f" does not return a value pass -elif f(): # Fail +elif f(): # E: "f" does not return a value pass -while f(): # Fail +while f(): # E: "f" does not return a value pass def g() -> object: - return f() # Fail -raise f() # Fail - -def f() -> None: pass + return f() # E: "f" does not return a value +raise f() # E: "f" does not return a value [builtins fixtures/exception.pyi] -[out] -main:2: error: "f" does not return a value -main:4: error: "f" does not return a value -main:6: error: "f" does not return a value -main:9: error: "f" does not return a value -main:10: error: "f" does not return a value [case testNoneReturnTypeWithExpressions] from typing import cast + 
+def f() -> None: pass +class A: + def __add__(self, x: 'A') -> 'A': pass + a = None # type: A [f()] # E: "f" does not return a value f() + a # E: "f" does not return a value @@ -1046,15 +1046,16 @@ f() == a # E: "f" does not return a value a != f() # E: "f" does not return a value cast(A, f()) f().foo # E: "f" does not return a value - -def f() -> None: pass -class A: - def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/list.pyi] [case testNoneReturnTypeWithExpressions2] import typing +def f() -> None: pass +class A: + def __add__(self, x: 'A') -> 'A': + pass + a, b = None, None # type: (A, bool) f() in a # E: "f" does not return a value # E: Unsupported right operand type for in ("A") a < f() # E: "f" does not return a value @@ -1064,11 +1065,6 @@ a in f() # E: "f" does not return a value not f() # E: "f" does not return a value f() and b # E: "f" does not return a value b or f() # E: "f" does not return a value - -def f() -> None: pass -class A: - def __add__(self, x: 'A') -> 'A': - pass [builtins fixtures/bool.pyi] @@ -1424,19 +1420,13 @@ z = x.append(y) if bool() else x.append(y) # E: "append" of "list" does not retu from typing import cast class A: def __add__(self, a: 'A') -> 'A': pass -a = None # type: A -None + a # Fail -f + a # Fail -a + f # Fail -cast(A, f) - def f() -> None: pass -[out] -main:5: error: Unsupported left operand type for + ("None") -main:6: error: Unsupported left operand type for + ("Callable[[], None]") -main:7: error: Unsupported operand types for + ("A" and "Callable[[], None]") - +a = None # type: A +None + a # E: Unsupported left operand type for + ("None") +f + a # E: Unsupported left operand type for + ("Callable[[], None]") +a + f # E: Unsupported operand types for + ("A" and "Callable[[], None]") +cast(A, f) [case testOperatorMethodWithInvalidArgCount] a = None # type: A diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index a76463e3106b..ebb3744e9f08 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1152,13 +1152,13 @@ from typing import Any def f(s): yield s +def g(x) -> Any: + yield x # E: Expression has type "Any" + x = f(0) # E: Expression has type "Any" for x in f(0): # E: Expression has type "Any" g(x) # E: Expression has type "Any" -def g(x) -> Any: - yield x # E: Expression has type "Any" - l = [1, 2, 3] l[f(0)] # E: Expression has type "Any" f(l) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 9afe9189caaa..c23bbb77f643 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -337,6 +337,11 @@ class A: pass [out] [case testCompatibilityOfSimpleTypeObjectWithStdType] +class A: + def __init__(self, a: 'A') -> None: pass + +def f() -> None: pass + t = None # type: type a = None # type: A @@ -347,11 +352,6 @@ if int(): if int(): t = A -class A: - def __init__(self, a: 'A') -> None: pass - -def f() -> None: pass - [case testFunctionTypesWithOverloads] from foo import * [file foo.pyi] @@ -466,6 +466,11 @@ if int(): [case testCallingFunctionsWithDefaultArgumentValues] # flags: --implicit-optional --no-strict-optional +class A: pass +class AA(A): pass +class B: pass + +def f(x: 'A' = None) -> 'B': pass a, b = None, None # type: (A, B) if int(): @@ -482,11 +487,6 @@ if int(): if int(): b = f(AA()) -def f(x: 'A' = None) -> 'B': pass - -class A: pass -class AA(A): pass -class B: pass [builtins fixtures/tuple.pyi] [case testDefaultArgumentExpressions] @@ -1133,6 +1133,7 @@ def dec(f: T) -> T: [out] 
[case testForwardReferenceToFunctionWithMultipleDecorators] +# flags: --disable-error-code=used-before-def def f(self) -> None: g() g(1) @@ -1167,6 +1168,7 @@ def dec(f): return f [builtins fixtures/staticmethod.pyi] [case testForwardRefereceToDecoratedFunctionWithCallExpressionDecorator] +# flags: --disable-error-code=used-before-def def f(self) -> None: g() g(1) @@ -2608,9 +2610,9 @@ import p def f() -> int: ... [case testLambdaDefaultTypeErrors] -lambda a=nonsense: a # E: Name "nonsense" is not defined lambda a=(1 + 'asdf'): a # E: Unsupported operand types for + ("int" and "str") -def f(x: int = i): # E: Name "i" is not defined +lambda a=nonsense: a # E: Name "nonsense" is not defined +def f(x: int = i): # E: Name "i" is not defined # E: Name "i" is used before definition i = 42 [case testRevealTypeOfCallExpressionReturningNoneWorks] diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 27441ce908fe..1be3145b3b10 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -20,21 +20,19 @@ class C: pass [case testGenericMethodArgument] from typing import TypeVar, Generic T = TypeVar('T') -a.f(c) # Fail -a.f(b) + +class A(Generic[T]): + def f(self, a: T) -> None: pass a = None # type: A[B] b = None # type: B c = None # type: C -class A(Generic[T]): - def f(self, a: T) -> None: pass +a.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" +a.f(b) class B: pass class C: pass -[out] -main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" - [case testGenericMemberVariable] from typing import TypeVar, Generic T = TypeVar('T') @@ -661,10 +659,10 @@ main:3: error: Type argument "float" of "Array" must be a subtype of "generic" [file other.py] from typing import Any, Generic, TypeVar -DT = TypeVar("DT", covariant=True, bound=dtype[Any]) -DTS = TypeVar("DTS", covariant=True, bound=generic) +DT = TypeVar("DT", covariant=True, bound='dtype[Any]') +DTS = TypeVar("DTS", covariant=True, bound='generic') S = TypeVar("S", bound=Any) -ST = TypeVar("ST", bound=generic, covariant=True) +ST = TypeVar("ST", bound='generic', covariant=True) class common: pass class generic(common): pass @@ -1383,10 +1381,11 @@ Z = TypeVar('Z') class OO: pass a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object] -f(a) # E: Argument 1 to "f" has incompatible type "A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]"; expected "OO" - def f(a: OO) -> None: pass + +f(a) # E: Argument 1 to "f" has incompatible type "A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]"; expected "OO" + class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass [case testErrorWithShorterGenericTypeName] @@ -1394,9 +1393,10 @@ from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, B] +def f(a: 'B') -> None: pass + f(a) # E: Argument 1 to "f" has incompatible type "A[object, B]"; expected "B" -def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass @@ -1405,9 +1405,10 @@ from typing import 
Callable, TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, Callable[[], None]] +def f(a: 'B') -> None: pass + f(a) # E: Argument 1 to "f" has incompatible type "A[object, Callable[[], None]]"; expected "B" -def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass @@ -2143,7 +2144,7 @@ from typing import Generic, TypeVar, Any, Tuple, Type T = TypeVar('T') S = TypeVar('S') -Q = TypeVar('Q', bound=A[Any]) +Q = TypeVar('Q', bound='A[Any]') class A(Generic[T]): @classmethod diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index e5b69fb6fb9d..60917db041a1 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -2507,7 +2507,7 @@ A = Tuple[int] [case testNewTypeFromForwardNamedTupleIncremental] from typing import NewType, NamedTuple, Tuple -NT = NewType('NT', N) +NT = NewType('NT', 'N') class N(NamedTuple): x: int @@ -2591,8 +2591,8 @@ class C(NamedTuple): # type: ignore from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') -IntNode = Node[int, S] -AnyNode = Node[S, T] +IntNode = Node[int, S] # type: ignore[used-before-def] +AnyNode = Node[S, T] # type: ignore[used-before-def] class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: @@ -2642,8 +2642,8 @@ class G(Generic[T]): x: T yg: G[M] -z: int = G[M]().x.x -z = G[M]().x[0] +z: int = G[M]().x.x # type: ignore[used-before-def] +z = G[M]().x[0] # type: ignore[used-before-def] M = NamedTuple('M', [('x', int)]) [builtins fixtures/tuple.pyi] [out] diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index f80f93eb2615..625ab091a6a9 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -7,6 +7,12 @@ [case testBasicContextInference] from typing import TypeVar, Generic T = TypeVar('T') + +def f() -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass + ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -17,15 +23,11 @@ if int(): ab = f() if int(): b = f() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") - -def f() -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass - [case testBasicContextInferenceForConstructor] from typing import TypeVar, Generic T = TypeVar('T') +class A(Generic[T]): pass +class B: pass ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -36,13 +38,16 @@ if int(): ab = A() if int(): b = A() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") - -class A(Generic[T]): pass -class B: pass - [case testIncompatibleContextInference] from typing import TypeVar, Generic T = TypeVar('T') +def f(a: T) -> 'A[T]': + pass + +class A(Generic[T]): pass + +class B: pass +class C: pass b = None # type: B c = None # type: C ab = None # type: A[B] @@ -63,14 +68,6 @@ if int(): if int(): ac = f(c) -def f(a: T) -> 'A[T]': - pass - -class A(Generic[T]): pass - -class B: pass -class C: pass - -- Local variables -- --------------- @@ -159,6 +156,12 @@ class B: pass [case testInferenceWithTypeVariableTwiceInReturnType] from typing import TypeVar, Tuple, Generic T = TypeVar('T') + +def f(a: T) -> 'Tuple[A[T], A[T]]': pass + +class A(Generic[T]): pass +class B: pass + b = None # type: B o = None # type: object ab = None # type: A[B] @@ -175,17 +178,20 @@ if int(): ab, ab = f(b) if int(): ao, ao = f(o) - -def f(a: T) -> 'Tuple[A[T], A[T]]': pass - -class A(Generic[T]): pass 
-class B: pass [builtins fixtures/tuple.pyi] [case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables] from typing import TypeVar, Tuple, Generic S = TypeVar('S') T = TypeVar('T') + +def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass +def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass +def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass + +class A(Generic[T]): pass +class B: pass + b = None # type: B o = None # type: object ab = None # type: A[B] @@ -206,13 +212,6 @@ if int(): ab, ab, ao = g(b, b) if int(): ab, ab, ab, ab = h(b, b) - -def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass -def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass -def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass - -class A(Generic[T]): pass -class B: pass [builtins fixtures/tuple.pyi] @@ -223,6 +222,13 @@ class B: pass [case testMultipleTvatInstancesInArgs] from typing import TypeVar, Generic T = TypeVar('T') + +def f(a: T, b: T) -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass +class C(B): pass + ac = None # type: A[C] ab = None # type: A[B] ao = None # type: A[object] @@ -246,12 +252,6 @@ if int(): if int(): ab = f(c, b) -def f(a: T, b: T) -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass -class C(B): pass - -- Nested generic function calls -- ----------------------------- @@ -260,6 +260,12 @@ class C(B): pass [case testNestedGenericFunctionCall1] from typing import TypeVar, Generic T = TypeVar('T') + +def f(a: T) -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass + aab = None # type: A[A[B]] aao = None # type: A[A[object]] ao = None # type: A[object] @@ -273,15 +279,16 @@ if int(): aab = f(f(b)) aao = f(f(b)) ao = f(f(b)) +[case testNestedGenericFunctionCall2] +from typing import TypeVar, Generic +T = TypeVar('T') -def f(a: T) -> 'A[T]': pass +def f(a: T) -> T: pass +def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass -[case testNestedGenericFunctionCall2] -from typing import TypeVar, Generic -T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -293,17 +300,16 @@ if int(): if int(): ab = f(g(b)) ao = f(g(b)) - -def f(a: T) -> T: pass +[case testNestedGenericFunctionCall3] +from typing import TypeVar, Generic +T = TypeVar('T') +def f(a: T, b: T) -> T: + pass def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass - -[case testNestedGenericFunctionCall3] -from typing import TypeVar, Generic -T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -320,14 +326,6 @@ if int(): if int(): ao = f(g(o), g(b)) -def f(a: T, b: T) -> T: - pass - -def g(a: T) -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass - -- Method calls -- ------------ @@ -339,6 +337,13 @@ T = TypeVar('T') o = None # type: object b = None # type: B c = None # type: C +def f(a: T) -> 'A[T]': pass + +class A(Generic[T]): + def g(self, a: 'A[T]') -> 'A[T]': pass + +class B: pass +class C(B): pass ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] @@ -353,14 +358,6 @@ if int(): ab = f(b).g(f(c)) ab.g(f(c)) -def f(a: T) -> 'A[T]': pass - -class A(Generic[T]): - def g(self, a: 'A[T]') -> 'A[T]': pass - -class B: pass -class C(B): pass - -- List expressions -- ---------------- @@ -461,8 +458,8 @@ class B: pass [case testParenthesesAndContext] from typing import List -l = ([A()]) # type: List[object] class A: pass +l = ([A()]) # type: List[object] [builtins fixtures/list.pyi] [case testComplexTypeInferenceWithTuple] @@ -470,14 +467,15 @@ from typing import 
TypeVar, Tuple, Generic k = TypeVar('k') t = TypeVar('t') v = TypeVar('v') -def f(x: Tuple[k]) -> 'A[k]': pass - -d = f((A(),)) # type: A[A[B]] class A(Generic[t]): pass class B: pass class C: pass class D(Generic[k, v]): pass + +def f(x: Tuple[k]) -> 'A[k]': pass + +d = f((A(),)) # type: A[A[B]] [builtins fixtures/list.pyi] @@ -505,12 +503,12 @@ d = {A() : a_c, [case testInitializationWithInferredGenericType] from typing import TypeVar, Generic T = TypeVar('T') -c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" def f(x: T) -> T: pass class C(Generic[T]): pass class A: pass +c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" [case testInferredGenericTypeAsReturnValue] from typing import TypeVar, Generic T = TypeVar('T') @@ -544,9 +542,6 @@ class B: pass from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') -x = A() # type: I[int] -a_object = A() # type: A[object] -y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") class I(Generic[t]): @abstractmethod @@ -554,16 +549,20 @@ class I(Generic[t]): class A(I[t], Generic[t]): def f(self): pass +x = A() # type: I[int] +a_object = A() # type: A[object] +y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") + [case testInferenceWithAbstractClassContext2] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') -a = f(A()) # type: A[int] -a_int = A() # type: A[int] -aa = f(a_int) class I(Generic[t]): pass class A(I[t], Generic[t]): pass def f(i: I[t]) -> A[t]: pass +a = f(A()) # type: A[int] +a_int = A() # type: A[int] +aa = f(a_int) [case testInferenceWithAbstractClassContext3] from typing import TypeVar, Generic, Iterable @@ -585,9 +584,9 @@ if int(): from typing import Any, TypeVar, Generic s = TypeVar('s') t = TypeVar('t') +class C(Generic[s, t]): pass x = [] # type: Any y = C() # type: Any -class C(Generic[s, t]): pass [builtins fixtures/list.pyi] @@ -737,6 +736,9 @@ a = m # type: List[A] # E: Incompatible types in assignment (expression has type [case testOrOperationInferredFromContext] from typing import List +class A: pass +class B: pass +class C(B): pass a, b, c = None, None, None # type: (List[A], List[B], List[C]) if int(): a = a or [] @@ -748,10 +750,6 @@ if int(): a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type "List[A]") if int(): b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type "List[B]") - -class A: pass -class B: pass -class C(B): pass [builtins fixtures/list.pyi] @@ -765,38 +763,38 @@ t = TypeVar('t') s = TypeVar('s') # Some type variables can be inferred using context, but not all of them. a = None # type: List[A] +def f(a: s, b: t) -> List[s]: pass +class A: pass +class B: pass if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" -def f(a: s, b: t) -> List[s]: pass -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testSomeTypeVarsInferredFromContext2] from typing import List, TypeVar s = TypeVar('s') t = TypeVar('t') +def f(a: s, b: t) -> List[s]: pass +class A: pass +class B: pass # Like testSomeTypeVarsInferredFromContext, but tvars in different order. 
a = None # type: List[A] if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" -def f(a: s, b: t) -> List[s]: pass -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -map( - [lambda x: x], []) def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass class A: pass +map( + [lambda x: x], []) [builtins fixtures/list.pyi] [out] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 45a833e5210c..41fe942b8339 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3,7 +3,9 @@ [case testInferSimpleGvarType] -import typing +class A: pass +class B: pass + x = A() y = B() if int(): @@ -14,9 +16,6 @@ if int(): x = y # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): x = x -class A: pass -class B: pass - [case testInferSimpleLvarType] import typing def f() -> None: @@ -401,6 +400,8 @@ a = None # type: A b = None # type: B c = None # type: Tuple[A, object] +def id(a: T) -> T: pass + if int(): b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") @@ -412,8 +413,6 @@ if int(): b = id(b) c = id(c) -def id(a: T) -> T: pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] @@ -444,20 +443,26 @@ def ff() -> None: x = f() # E: Need type annotation for "x" reveal_type(x) # N: Revealed type is "Any" +def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same TypeVar +def g(a: T) -> None: pass + g(None) # Ok f() # Ok because not used to infer local variable type g(a) - -def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same TypeVar -def g(a: T) -> None: pass [out] [case testInferenceWithMultipleConstraints] from typing import TypeVar + +class A: pass +class B(A): pass + T = TypeVar('T') a = None # type: A b = None # type: B +def f(a: T, b: T) -> T: pass + if int(): b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): @@ -467,15 +472,16 @@ if int(): if int(): a = f(b, a) -def f(a: T, b: T) -> T: pass - -class A: pass -class B(A): pass - [case testInferenceWithMultipleVariables] from typing import Tuple, TypeVar T = TypeVar('T') S = TypeVar('S') + +def f(a: T, b: S) -> Tuple[T, S]: pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) taa = None # type: Tuple[A, A] tab = None # type: Tuple[A, B] @@ -493,11 +499,6 @@ if int(): tab = f(a, b) if int(): tba = f(b, a) - -def f(a: T, b: S) -> Tuple[T, S]: pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] [case testConstraintSolvingWithSimpleGenerics] @@ -507,6 +508,14 @@ ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] +def f(a: 'A[T]') -> 'A[T]': pass + +def g(a: T) -> T: pass + +class A(Generic[T]): pass +class B: pass +class C: pass + if int(): ab = f(ao) # E: Argument 1 to "f" has incompatible type "A[object]"; expected "A[B]" ao = f(ab) # E: Argument 1 to "f" has incompatible type "A[B]"; expected "A[object]" @@ -524,37 +533,33 @@ if int(): if int(): ab = g(ab) ao = g(ao) - -def f(a: 'A[T]') -> 'A[T]': pass - -def g(a: T) -> T: pass - -class A(Generic[T]): pass -class B: pass -class 
C: pass - [case testConstraintSolvingFailureWithSimpleGenerics] from typing import TypeVar, Generic T = TypeVar('T') ao = None # type: A[object] ab = None # type: A[B] -f(ao, ab) # E: Cannot infer type argument 1 of "f" -f(ab, ao) # E: Cannot infer type argument 1 of "f" -f(ao, ao) -f(ab, ab) - def f(a: 'A[T]', b: 'A[T]') -> None: pass class A(Generic[T]): pass class B: pass + +f(ao, ab) # E: Cannot infer type argument 1 of "f" +f(ab, ao) # E: Cannot infer type argument 1 of "f" +f(ao, ao) +f(ab, ab) [case testTypeInferenceWithCalleeDefaultArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A o = None # type: object +def f(a: T = None) -> T: pass +def g(a: T, b: T = None) -> T: pass + +class A: pass + if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): @@ -569,11 +574,6 @@ if int(): if int(): a = g(a) -def f(a: T = None) -> T: pass -def g(a: T, b: T = None) -> T: pass - -class A: pass - -- Generic function inference with multiple inheritance -- ---------------------------------------------------- @@ -655,6 +655,12 @@ g(c) [case testPrecedenceOfFirstBaseAsInferenceResult] from typing import TypeVar from abc import abstractmethod, ABCMeta +class A: pass +class B(A, I, J): pass +class C(A, I, J): pass + +def f(a: T, b: T) -> T: pass + T = TypeVar('T') a, i, j = None, None, None # type: (A, I, J) @@ -663,11 +669,7 @@ a = f(B(), C()) class I(metaclass=ABCMeta): pass class J(metaclass=ABCMeta): pass -def f(a: T, b: T) -> T: pass -class A: pass -class B(A, I, J): pass -class C(A, I, J): pass [builtins fixtures/tuple.pyi] @@ -966,6 +968,9 @@ list_2 = [f, h] [case testInferenceOfFor1] a, b = None, None # type: (A, B) +class A: pass +class B: pass + for x in [A()]: b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x @@ -973,40 +978,32 @@ for x in [A()]: for y in []: # E: Need type annotation for "y" a = y reveal_type(y) # N: Revealed type is "Any" - -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testInferenceOfFor2] +class A: pass +class B: pass +class C: pass a, b, c = None, None, None # type: (A, B, C) for x, (y, z) in [(A(), (B(), C()))]: - b = x # Fail - c = y # Fail - a = z # Fail + b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") + c = y # E: Incompatible types in assignment (expression has type "B", variable has type "C") + a = z # E: Incompatible types in assignment (expression has type "C", variable has type "A") a = x b = y c = z -for xx, yy, zz in [(A(), B())]: # Fail +for xx, yy, zz in [(A(), B())]: # E: Need more than 2 values to unpack (3 expected) pass -for xx, (yy, zz) in [(A(), B())]: # Fail +for xx, (yy, zz) in [(A(), B())]: # E: "B" object is not iterable pass for xxx, yyy in [(None, None)]: pass - -class A: pass -class B: pass -class C: pass [builtins fixtures/for.pyi] -[out] -main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") -main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C") -main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A") -main:10: error: Need more than 2 values to unpack (3 expected) -main:12: error: "B" object is not iterable [case testInferenceOfFor3] +class A: pass +class B: pass a, b = None, None # type: (A, B) @@ -1021,19 +1018,21 @@ for e, f in [[]]: # E: Need type annotation for "e" \ reveal_type(e) # N: Revealed type is "Any" reveal_type(f) 
# N: Revealed type is "Any" -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testForStatementInferenceWithVoid] -import typing +def f() -> None: pass + for x in f(): # E: "f" does not return a value pass -def f() -> None: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex] import typing + +class A: pass +class B: pass + for a in [A()]: pass a = A() if int(): @@ -1041,8 +1040,6 @@ if int(): for a in []: pass a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex2] @@ -1128,15 +1125,15 @@ if int(): class A: pass [case testInferGlobalDefinedInBlock] -import typing +class A: pass +class B: pass + if A: a = A() if int(): a = A() if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass [case testAssigningAnyStrToNone] from typing import Tuple, TypeVar @@ -1314,7 +1311,7 @@ class A: pass [case testAccessGlobalVarBeforeItsTypeIsAvailable] import typing -x.y # E: Cannot determine type of "x" +x.y # E: Cannot determine type of "x" # E: Name "x" is used before definition x = object() x.y # E: "object" has no attribute "y" @@ -1977,7 +1974,7 @@ class A: [out] [case testMultipassAndTopLevelVariable] -y = x # E: Cannot determine type of "x" +y = x # E: Cannot determine type of "x" # E: Name "x" is used before definition y() x = 1+int() [out] @@ -2160,7 +2157,7 @@ from typing import TypeVar, Callable T = TypeVar('T') def dec() -> Callable[[T], T]: pass -A.g # E: Cannot determine type of "g" +A.g # E: Cannot determine type of "g" # E: Name "A" is used before definition class A: @classmethod @@ -2990,13 +2987,14 @@ class C: [case testUnionGenericWithBoundedVariable] from typing import Generic, TypeVar, Union +class A: ... +class B(A): ... + T = TypeVar('T', bound=A) class Z(Generic[T]): def __init__(self, y: T) -> None: self.y = y -class A: ... -class B(A): ... 
F = TypeVar('F', bound=A) def q1(x: Union[F, Z[F]]) -> F: diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index ace28a18a5a8..e0fe389bbbd9 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -8,24 +8,27 @@ f(o=None()) # E: "None" not callable [case testSimpleKeywordArgument] import typing +class A: pass def f(a: 'A') -> None: pass f(a=A()) f(a=object()) # E: Argument "a" to "f" has incompatible type "object"; expected "A" -class A: pass [case testTwoKeywordArgumentsNotInOrder] import typing +class A: pass +class B: pass def f(a: 'A', b: 'B') -> None: pass f(b=A(), a=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(b=B(), a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "A" f(a=A(), b=B()) f(b=B(), a=A()) -class A: pass -class B: pass [case testOneOfSeveralOptionalKeywordArguments] # flags: --implicit-optional import typing +class A: pass +class B: pass +class C: pass def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass f(a=A()) f(b=B()) @@ -35,39 +38,34 @@ f(a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "Optional[ f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(c=B()) # E: Argument "c" to "f" has incompatible type "B"; expected "Optional[C]" f(b=B(), c=A()) # E: Argument "c" to "f" has incompatible type "A"; expected "Optional[C]" -class A: pass -class B: pass -class C: pass - [case testBothPositionalAndKeywordArguments] import typing +class A: pass +class B: pass def f(a: 'A', b: 'B') -> None: pass f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(A(), b=B()) -class A: pass -class B: pass [case testContextSensitiveTypeInferenceForKeywordArg] from typing import List +class A: pass def f(a: 'A', b: 'List[A]') -> None: pass f(b=[], a=A()) -class A: pass [builtins fixtures/list.pyi] [case testGivingArgumentAsPositionalAndKeywordArg] import typing -def f(a: 'A', b: 'B' = None) -> None: pass -f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass +def f(a: 'A', b: 'B' = None) -> None: pass +f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testGivingArgumentAsPositionalAndKeywordArg2] import typing -def f(a: 'A' = None, b: 'B' = None) -> None: pass -f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass - +def f(a: 'A' = None, b: 'B' = None) -> None: pass +f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testPositionalAndKeywordForSameArg] # This used to crash in check_argument_count(). See #1095. def f(a: int): pass @@ -81,28 +79,28 @@ f(b=object()) # E: Unexpected keyword argument "b" for "f" class A: pass [case testKeywordMisspelling] +class A: pass def f(other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? -class A: pass [case testMultipleKeywordsForMisspelling] -def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here -f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "other"? class A: pass class B: pass +def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here +f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "other"? 
[case testKeywordMisspellingDifferentType] -def f(other: 'A') -> None: pass # N: "f" defined here -f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass +def f(other: 'A') -> None: pass # N: "f" defined here +f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? [case testKeywordMisspellingInheritance] -def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here -f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? class A: pass class B(A): pass class C: pass +def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here +f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? [case testKeywordMisspellingFloatInt] def f(atter: float, btter: int) -> None: pass # N: "f" defined here @@ -110,28 +108,28 @@ x: int = 5 f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? [case testKeywordMisspellingVarArgs] +class A: pass def f(other: 'A', *atter: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? -class A: pass [builtins fixtures/tuple.pyi] [case testKeywordMisspellingOnlyVarArgs] +class A: pass def f(*other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f" -class A: pass [builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarArgsDifferentTypes] -def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here -f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass +def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here +f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? 
[builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarKwargs] +class A: pass def f(other: 'A', **atter: 'A') -> None: pass f(otter=A()) # E: Missing positional argument "other" in call to "f" -class A: pass [builtins fixtures/dict.pyi] [case testKeywordArgumentsWithDynamicallyTypedCallable] @@ -143,18 +141,15 @@ f(x=None) [case testKeywordArgumentWithFunctionObject] from typing import Callable -f = None # type: Callable[[A, B], None] -f(a=A(), b=B()) -f(A(), b=B()) class A: pass class B: pass -[out] -main:3: error: Unexpected keyword argument "a" -main:3: error: Unexpected keyword argument "b" -main:4: error: Unexpected keyword argument "b" - +f = None # type: Callable[[A, B], None] +f(a=A(), b=B()) # E: Unexpected keyword argument "a" # E: Unexpected keyword argument "b" +f(A(), b=B()) # E: Unexpected keyword argument "b" [case testKeywordOnlyArguments] import typing +class A: pass +class B: pass def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass @@ -177,13 +172,12 @@ i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) +[case testKeywordOnlyArgumentsFastparse] +import typing class A: pass class B: pass -[case testKeywordOnlyArgumentsFastparse] - -import typing def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass @@ -206,10 +200,6 @@ i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) - -class A: pass -class B: pass - [case testKwargsAfterBareArgs] from typing import Tuple, Any def f(a, *, b=None) -> None: pass @@ -222,6 +212,8 @@ f(a, **b) [case testKeywordArgAfterVarArgs] # flags: --implicit-optional import typing +class A: pass +class B: pass def f(*a: 'A', b: 'B' = None) -> None: pass f() f(A()) @@ -232,13 +224,13 @@ f(A(), A(), b=B()) f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs] # flags: --implicit-optional --no-strict-optional from typing import List +class A: pass +class B: pass def f(*a: 'A', b: 'B' = None) -> None: pass a = None # type: List[A] f(*a) @@ -249,18 +241,16 @@ f(A(), *a, b=B()) f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(*a, b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallingDynamicallyTypedFunctionWithKeywordArgs] import typing +class A: pass def f(x, y=A()): pass # N: "f" defined here f(x=A(), y=A()) f(y=A(), x=A()) f(y=A()) # E: Missing positional argument "x" in call to "f" f(A(), z=A()) # E: Unexpected keyword argument "z" for "f" -class A: pass [case testKwargsArgumentInFunctionBody] from typing import Dict, Any @@ -284,6 +274,8 @@ class A: pass [case testCallingFunctionThatAcceptsVarKwargs] import typing +class A: pass +class B: pass def f( **kwargs: 'A') -> None: pass f() f(x=A()) @@ -291,12 +283,12 @@ f(y=A(), z=A()) f(x=B()) # E: Argument "x" to "f" has incompatible type "B"; expected "A" f(A()) # E: Too many arguments for "f" # Perhaps a better 
message would be "Too many *positional* arguments..." -class A: pass -class B: pass [builtins fixtures/dict.pyi] [case testCallingFunctionWithKeywordVarArgs] from typing import Dict +class A: pass +class B: pass def f( **kwargs: 'A') -> None: pass d = None # type: Dict[str, A] f(**d) @@ -305,8 +297,6 @@ d2 = None # type: Dict[str, B] f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(**{'x': B()}) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" -class A: pass -class B: pass [builtins fixtures/dict.pyi] [case testKwargsAllowedInDunderCall] @@ -355,11 +345,11 @@ class A: pass [case testInvalidTypeForKeywordVarArg] # flags: --strict-optional from typing import Dict, Any, Optional +class A: pass def f(**kwargs: 'A') -> None: pass d = {} # type: Dict[A, A] f(**d) # E: Keywords must be strings f(**A()) # E: Argument after ** must be a mapping, not "A" -class A: pass kwargs: Optional[Any] f(**kwargs) # E: Argument after ** must be a mapping, not "Optional[Any]" [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index 4eda14c2c592..6b9f139f541c 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -561,6 +561,7 @@ A # E: Name "A" is not defined [builtins fixtures/tuple.pyi] [case testNamedTupleForwardAsUpperBound] +# flags: --disable-error-code=used-before-def from typing import NamedTuple, TypeVar, Generic T = TypeVar('T', bound='M') class G(Generic[T]): @@ -723,7 +724,7 @@ reveal_type(n.y[0]) # N: Revealed type is "Any" from typing import NamedTuple B = NamedTuple('B', [ - ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) + ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) # E: Name "A" is used before definition ('y', int), ]) A = NamedTuple('A', [ @@ -904,6 +905,7 @@ if not b: [builtins fixtures/tuple.pyi] [case testNamedTupleDoubleForward] +# flags: --disable-error-code=used-before-def from typing import Union, Mapping, NamedTuple class MyBaseTuple(NamedTuple): diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index 97cf1ef1494d..99f4141a4d64 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -434,7 +434,7 @@ def main() -> None: x # E: Name "x" is not defined [case testNewAnalyzerCyclicDefinitions] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code used-before-def gx = gy # E: Cannot resolve name "gy" (possible cyclic definition) gy = gx def main() -> None: @@ -521,12 +521,6 @@ reveal_type(b.x) # N: Revealed type is "builtins.int" reveal_type(b.f()) # N: Revealed type is "builtins.str" [case testNewAnalyzerNestedClass2] -b: A.B -b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" -reveal_type(b) # N: Revealed type is "__main__.A.B" -reveal_type(b.x) # N: Revealed type is "builtins.int" -reveal_type(b.f()) # N: Revealed type is "builtins.str" - class A: class B: x: int @@ -537,17 +531,14 @@ class A: def f(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") +b: A.B +b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" +reveal_type(b) # N: Revealed type is "__main__.A.B" +reveal_type(b.x) # N: Revealed type is "builtins.int" +reveal_type(b.f()) # N: Revealed type is "builtins.str" 
[case testNewAnalyzerGenerics] from typing import TypeVar, Generic -c: C[int] -c2: C[int, str] # E: "C" expects 1 type argument, but 2 given -c3: C -c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" -reveal_type(c.get()) # N: Revealed type is "builtins.int" -reveal_type(c2) # N: Revealed type is "__main__.C[Any]" -reveal_type(c3) # N: Revealed type is "__main__.C[Any]" - T = TypeVar('T') class C(Generic[T]): @@ -557,6 +548,13 @@ class C(Generic[T]): def get(self) -> T: return self.x +c: C[int] +c2: C[int, str] # E: "C" expects 1 type argument, but 2 given +c3: C +c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" +reveal_type(c.get()) # N: Revealed type is "builtins.int" +reveal_type(c2) # N: Revealed type is "__main__.C[Any]" +reveal_type(c3) # N: Revealed type is "__main__.C[Any]" [case testNewAnalyzerGenericsTypeVarForwardRef] from typing import TypeVar, Generic @@ -577,6 +575,12 @@ reveal_type(c.get()) # N: Revealed type is "builtins.int" [case testNewAnalyzerTypeAlias] from typing import Union, TypeVar, Generic +T = TypeVar('T') +S = TypeVar('S') +class D(Generic[T, S]): pass + +class C: pass + C2 = C U = Union[C, int] G = D[T, C] @@ -587,13 +591,6 @@ u: U reveal_type(u) # N: Revealed type is "Union[__main__.C, builtins.int]" g: G[int] reveal_type(g) # N: Revealed type is "__main__.D[builtins.int, __main__.C]" - -class C: pass - -T = TypeVar('T') -S = TypeVar('S') -class D(Generic[T, S]): pass - [case testNewAnalyzerTypeAlias2] from typing import Union @@ -678,13 +675,14 @@ a.f(1.0) # E: No overload variant of "f" of "A" matches argument type "float" \ # N: def f(self, x: str) -> str [case testNewAnalyzerPromotion] +def f(x: float) -> None: pass y: int f(y) f(1) -def f(x: float) -> None: pass [builtins fixtures/primitives.pyi] [case testNewAnalyzerFunctionDecorator] +# flags: --disable-error-code used-before-def from typing import Callable @dec @@ -702,6 +700,7 @@ reveal_type(f1('')) # N: Revealed type is "builtins.str" f2(1) # E: Argument 1 to "f2" has incompatible type "int"; expected "str" [case testNewAnalyzerTypeVarForwardReference] +# flags: --disable-error-code used-before-def from typing import TypeVar, Generic T = TypeVar('T') @@ -721,7 +720,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -XY = TypeVar('XY', X, Y) +XY = TypeVar('XY', 'X', 'Y') class C(Generic[T]): pass @@ -737,7 +736,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -XY = TypeVar('XY', X, Y) +XY = TypeVar('XY', 'X', 'Y') class C(Generic[T]): pass @@ -755,7 +754,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -TY = TypeVar('TY', bound=Y) +TY = TypeVar('TY', bound='Y') class C(Generic[T]): pass @@ -775,7 +774,7 @@ class C(Generic[T]): def func(x: U) -> U: ... 
U = TypeVar('U', asdf, asdf) # E: Name "asdf" is not defined -T = TypeVar('T', bound=asdf) # E: Name "asdf" is not defined +T = TypeVar('T', bound='asdf') # E: Name "asdf" is not defined reveal_type(C) # N: Revealed type is "def [T <: Any] (x: T`1) -> __main__.C[T`1]" reveal_type(func) # N: Revealed type is "def [U in (Any, Any)] (x: U`-1) -> U`-1" @@ -799,16 +798,16 @@ T = TypeVar('T') class A(Generic[T]): pass -a1: A[C] = C() -a2: A[D] = C() \ - # E: Incompatible types in assignment (expression has type "C", variable has type "A[D]") - class C(A[C]): pass -class D(A[D]): +class D(A['D']): pass +a1: A[C] = C() +a2: A[D] = C() \ + # E: Incompatible types in assignment (expression has type "C", variable has type "A[D]") + [case testNewAnalyzerTypeVarBoundForwardRef] from typing import TypeVar @@ -855,19 +854,17 @@ def f(): pass [case testNewAnalyzerNamedTupleCall] from typing import NamedTuple -o: Out -i: In +class Other: pass +In = NamedTuple('In', [('s', str), ('t', Other)]) Out = NamedTuple('Out', [('x', In), ('y', Other)]) - +o: Out +i: In reveal_type(o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" reveal_type(o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other, fallback=__main__.In]" reveal_type(o.y) # N: Revealed type is "__main__.Other" reveal_type(o.x.t) # N: Revealed type is "__main__.Other" reveal_type(i.t) # N: Revealed type is "__main__.Other" - -In = NamedTuple('In', [('s', str), ('t', Other)]) -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClass] @@ -936,29 +933,23 @@ class C: [case testNewAnalyzerNamedTupleCallNestedMethod] from typing import NamedTuple -c = C() -reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11], __main__.Other@12, fallback=__main__.C.Out@10]" -reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11]" - class C: def get_tuple(self) -> None: - self.o: Out - Out = NamedTuple('Out', [('x', In), ('y', Other)]) - In = NamedTuple('In', [('s', str), ('t', Other)]) + Out = NamedTuple('Out', [('x', 'In'), ('y', 'Other')]) + In = NamedTuple('In', [('s', str), ('t', 'Other')]) class Other: pass + self.o: Out + +c = C() +reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@7, fallback=__main__.C.In@6], __main__.Other@7, fallback=__main__.C.Out@5]" +reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@7, fallback=__main__.C.In@6]" [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassNestedMethod] from typing import NamedTuple -c = C() -reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15], __main__.Other@18, fallback=__main__.C.Out@11]" -reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15]" -reveal_type(c.o.method()) # N: Revealed type is "Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15]" - class C: def get_tuple(self) -> None: - self.o: Out class Out(NamedTuple): x: In y: Other @@ -967,6 +958,12 @@ class C: s: str t: Other class Other: pass + self.o: Out + +c = C() +reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9], __main__.Other@12, fallback=__main__.C.Out@5]" +reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9]" 
+reveal_type(c.o.method()) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9]" [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassForwardMethod] @@ -988,34 +985,31 @@ class Other(NamedTuple): [case testNewAnalyzerNamedTupleSpecialMethods] from typing import NamedTuple +class Other: pass +In = NamedTuple('In', [('s', str), ('t', Other)]) +Out = NamedTuple('Out', [('x', In), ('y', Other)]) +class SubO(Out): pass + o: SubO reveal_type(SubO._make) # N: Revealed type is "def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]" reveal_type(o._replace(y=Other())) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]" - -class SubO(Out): pass - -Out = NamedTuple('Out', [('x', In), ('y', Other)]) -In = NamedTuple('In', [('s', str), ('t', Other)]) -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleBaseClass] from typing import NamedTuple +class Other: pass +class In(NamedTuple): + s: str + t: Other +class Out(NamedTuple('Out', [('x', In), ('y', Other)])): + pass o: Out reveal_type(o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" reveal_type(o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other, fallback=__main__.In]" reveal_type(o.x.t) # N: Revealed type is "__main__.Other" reveal_type(Out._make) # N: Revealed type is "def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" - -class Out(NamedTuple('Out', [('x', In), ('y', Other)])): - pass - -class In(NamedTuple): - s: str - t: Other -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerIncompleteRefShadowsBuiltin1] @@ -1134,7 +1128,11 @@ class B(type): reveal_type(A.f()) # N: Revealed type is "builtins.int" [case testNewAnalyzerMetaclass2] -reveal_type(A.f()) # N: Revealed type is "builtins.int" +class B(type): + def f(cls) -> int: + return 0 + +class C: pass class A(metaclass=B): pass @@ -1142,12 +1140,7 @@ class A(metaclass=B): class AA(metaclass=C): # E: Metaclasses not inheriting from "type" are not supported pass -class B(type): - def f(cls) -> int: - return 0 - -class C: pass - +reveal_type(A.f()) # N: Revealed type is "builtins.int" [case testNewAnalyzerMetaclassPlaceholder] class B(C): pass @@ -1211,14 +1204,14 @@ class B(type): def f(cls) -> int: return 0 -reveal_type(A.f()) # N: Revealed type is "builtins.int" -reveal_type(A.x) # N: Revealed type is "builtins.str" - class A(six.with_metaclass(B, Defer)): pass class Defer: x: str + +reveal_type(A.f()) # N: Revealed type is "builtins.int" +reveal_type(A.x) # N: Revealed type is "builtins.str" [builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture1] @@ -1252,6 +1245,7 @@ reveal_type(A.x) # N: Revealed type is "builtins.str" [builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture4] +# flags: --disable-error-code used-before-def import future.utils class B(type): @@ -1271,31 +1265,32 @@ class Defer: [case testNewAnalyzerFinalDefiningModuleVar] from typing import Final +class D(C): ... +class C: ... 
+ x: Final = C() y: Final[C] = D() bad: Final[D] = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") reveal_type(x) # N: Revealed type is "__main__.C" reveal_type(y) # N: Revealed type is "__main__.C" -class D(C): ... -class C: ... - [case testNewAnalyzerFinalDefiningInstanceVar] from typing import Final +class D: ... +class E(C): ... + class C: def __init__(self, x: D) -> None: self.x: Final = x self.y: Final[C] = E(D()) reveal_type(C(D()).x) # N: Revealed type is "__main__.D" reveal_type(C(D()).y) # N: Revealed type is "__main__.C" - -class D: ... -class E(C): ... - [case testNewAnalyzerFinalReassignModuleVar] from typing import Final +class A: ... + x: Final = A() x = A() # E: Cannot assign to final name "x" @@ -1308,8 +1303,6 @@ def f2() -> None: def g() -> None: f() -class A: ... - [case testNewAnalyzerFinalReassignModuleReexport] import a [file a.py] @@ -1382,6 +1375,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.A" [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClass3] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -1461,13 +1455,13 @@ from typing import List, TypeVar, Union T = TypeVar('T') x: B[int] -B = A[List[T]] A = Union[int, T] +B = A[List[T]] class C(List[B[int]]): pass +y: C reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.list[builtins.int]]" reveal_type(y[0]) # N: Revealed type is "Union[builtins.int, builtins.list[builtins.int]]" -y: C [builtins fixtures/list.pyi] [case testNewAnalyzerForwardAliasFromUnion] @@ -1488,6 +1482,7 @@ class C: [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyTwoDeferrals] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -1500,7 +1495,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBase] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code used-before-def from typing import List x: B @@ -1518,6 +1513,7 @@ main:8: note: Revealed type is "Any" main:9: note: Revealed type is "Any" [case testNewAnalyzerAliasToNotReadyTwoDeferralsFunction] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import List @@ -1534,7 +1530,7 @@ reveal_type(f) # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.l [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBaseFunction] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code used-before-def import a [file a.py] from typing import List @@ -1558,11 +1554,11 @@ tmp/a.py:5: error: Cannot resolve name "C" (possible cyclic definition) from typing import List, Union x: A -A = Union[B, C] - class B(List[A]): pass class C(List[A]): pass +A = Union[B, C] + reveal_type(x) # N: Revealed type is "Union[__main__.B, __main__.C]" reveal_type(x[0]) # N: Revealed type is "Union[__main__.B, __main__.C]" [builtins fixtures/list.pyi] @@ -1578,19 +1574,18 @@ reveal_type(func()) # N: Revealed type is "builtins.list[Tuple[b.C, b.C]]" from typing import List, Tuple from a import func -B = List[Tuple[C, C]] - -class C(A): ... class A: ... +class C(A): ... 
+B = List[Tuple[C, C]] [builtins fixtures/list.pyi] [case testNewAnalyzerListComprehension] from typing import List +class A: pass +class B: pass a: List[A] a = [x for x in a] b: List[B] = [x for x in a] # E: List comprehension has incompatible type List[A]; expected List[B] -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testNewAnalyzerDictionaryComprehension] @@ -1796,23 +1791,26 @@ a.y = 1 # E: Incompatible types in assignment (expression has type "int", varia [case testNewAnalyzerAliasesFixedFew] from typing import List, Generic, TypeVar +T = TypeVar('T') +class C(Generic[T]): + ... +A = List[C] +x: A def func(x: List[C[T]]) -> T: ... -x: A -A = List[C] reveal_type(x) # N: Revealed type is "builtins.list[__main__.C[Any]]" reveal_type(func(x)) # N: Revealed type is "Any" -class C(Generic[T]): - ... - -T = TypeVar('T') [builtins fixtures/list.pyi] [case testNewAnalyzerAliasesFixedMany] from typing import List, Generic, TypeVar +T = TypeVar('T') +class C(Generic[T]): + ... + def func(x: List[C[T]]) -> T: ... @@ -1822,9 +1820,7 @@ A = List[C[int, str]] # E: "C" expects 1 type argument, but 2 given reveal_type(x) # N: Revealed type is "builtins.list[__main__.C[Any]]" reveal_type(func(x)) # N: Revealed type is "Any" -class C(Generic[T]): - ... -T = TypeVar('T') + [builtins fixtures/list.pyi] [case testNewAnalyzerBuiltinAliasesFixed] @@ -1973,7 +1969,7 @@ class A: pass class B: pass class C(B): pass -S = TypeVar('S', bound=Tuple[G[A], ...]) +S = TypeVar('S', bound='Tuple[G[A], ...]') class GG(Generic[S]): pass @@ -2060,12 +2056,12 @@ class C(Tuple[int, str]): class Meta(type): x = int() -y = C.x -reveal_type(y) # N: Revealed type is "builtins.int" - class C(metaclass=Meta): pass +y = C.x +reveal_type(y) # N: Revealed type is "builtins.int" + [case testNewAnalyzerFunctionError] def f(x: asdf) -> None: # E: Name "asdf" is not defined pass @@ -2089,7 +2085,7 @@ from typing import NewType, List x: C reveal_type(x[0]) # N: Revealed type is "__main__.C" -C = NewType('C', B) +C = NewType('C', 'B') class B(List[C]): pass @@ -2101,8 +2097,8 @@ from typing import NewType, List x: D reveal_type(x[0]) # N: Revealed type is "__main__.C" +C = NewType('C', 'B') D = C -C = NewType('C', B) class B(List[D]): pass @@ -2114,22 +2110,22 @@ from typing import NewType, List x: D reveal_type(x[0][0]) # N: Revealed type is "__main__.C" -D = C -C = NewType('C', List[B]) +D = C # E: Name "C" is used before definition +C = NewType('C', 'List[B]') class B(List[C]): pass [builtins fixtures/list.pyi] [case testNewAnalyzerNewTypeForwardClassAliasDirect] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code used-before-def from typing import NewType, List x: D reveal_type(x[0][0]) D = List[C] -C = NewType('C', B) +C = NewType('C', 'B') class B(D): pass @@ -2178,9 +2174,9 @@ reveal_type(x) # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=_ [case testNewAnalyzerDuplicateTypeVar] from typing import TypeVar, Generic, Any -T = TypeVar('T', bound=B[Any]) +T = TypeVar('T', bound='B[Any]') # The "int" error is because of typing fixture. 
-T = TypeVar('T', bound=C) # E: Cannot redefine "T" as a type variable \ +T = TypeVar('T', bound='C') # E: Cannot redefine "T" as a type variable \ # E: Invalid assignment target \ # E: "int" not callable @@ -2193,6 +2189,7 @@ y: B[B[Any]] reveal_type(y.x) # N: Revealed type is "__main__.B[Any]" [case testNewAnalyzerDuplicateTypeVarImportCycle] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import TypeVar, Any @@ -2220,6 +2217,7 @@ tmp/a.py:5: error: Invalid assignment target tmp/a.py:5: error: "int" not callable [case testNewAnalyzerDuplicateTypeVarImportCycleWithAliases] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import TypeVar, Any @@ -2313,6 +2311,7 @@ C = NamedTuple('C', [('x', int)]) [builtins fixtures/tuple.pyi] [case testNewAnalyzerApplicationForward1] +# flags: --disable-error-code used-before-def from typing import Generic, TypeVar x = C[int]() @@ -2335,15 +2334,14 @@ class A: ... [case testNewAnalyzerApplicationForward3] from typing import Generic, TypeVar -x = C[A]() -reveal_type(x) # N: Revealed type is "__main__.C[__main__.A]" - +class A: ... T = TypeVar('T') class C(Generic[T]): ... - -class A: ... +x = C[A]() +reveal_type(x) # N: Revealed type is "__main__.C[__main__.A]" [case testNewAnalyzerApplicationForward4] +# flags: --disable-error-code used-before-def from typing import Generic, TypeVar x = C[A]() # E: Value of type variable "T" of "C" cannot be "A" @@ -2474,6 +2472,9 @@ else: y() # E: "str" not callable [case testNewAnalyzerFirstAliasTargetWins] +class DesiredTarget: + attr: int + if int(): Alias = DesiredTarget else: @@ -2483,12 +2484,8 @@ else: x: Alias reveal_type(x.attr) # N: Revealed type is "builtins.int" - -class DesiredTarget: - attr: int - [case testNewAnalyzerFirstVarDefinitionWins] -x = y +x = y # E: Name "y" is used before definition x = 1 # We want to check that the first definition creates the variable. diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index a5e6cefc2af0..4209f4ec9164 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -25,6 +25,9 @@ main:6: error: Name "f" already defined on line 2 [case testTypeCheckOverloadWithImplementation] from typing import overload, Any +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... @overload @@ -35,14 +38,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypingExtensionsOverload] from typing import Any from typing_extensions import overload +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... @overload @@ -53,13 +56,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadNeedsImplementation] from typing import overload, Any + +class A: pass +class B: pass + @overload # E: An overloaded function outside a stub file must have an implementation def f(x: 'A') -> 'B': ... @overload @@ -67,9 +71,6 @@ def f(x: 'B') -> 'A': ... 
reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testSingleOverloadNoImplementation] @@ -84,6 +85,9 @@ class B: pass [case testOverloadByAnyOtherName] from typing import overload as rose from typing import Any +class A: pass +class B: pass + @rose def f(x: 'A') -> 'B': ... @rose @@ -94,14 +98,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithDecoratedImplementation] from typing import overload, Any +class A: pass +class B: pass + def deco(fun): ... @overload @@ -115,9 +119,6 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadDecoratedImplementationNotLast] @@ -174,6 +175,9 @@ class B: pass [case testTypeCheckOverloadWithImplementationError] from typing import overload, Any +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... @overload @@ -196,9 +200,6 @@ def g(x): reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithUntypedImplAndMultipleVariants] diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 8c4aef9b5be0..e490457ff25c 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -794,7 +794,7 @@ main:18: note: def attr2(self) -> str [case testSelfTypesWithProtocolsBehaveAsWithNominal] from typing import Protocol, TypeVar -T = TypeVar('T', bound=Shape) +T = TypeVar('T', bound='Shape') class Shape(Protocol): def combine(self: T, other: T) -> T: pass diff --git a/test-data/unit/check-python39.test b/test-data/unit/check-python39.test index d169f4001015..105051a840bb 100644 --- a/test-data/unit/check-python39.test +++ b/test-data/unit/check-python39.test @@ -4,9 +4,9 @@ # most important test, to deal with this we'll only run this test with Python 3.9 and later. 
import typing def f(a: 'A', b: 'B') -> None: pass -f(a=A(), b=B(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass +f(a=A(), b=B(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testPEP614] diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index 0aa3c4c18be3..53811521f442 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -88,6 +88,7 @@ A = int | list[A] -- Tests duplicating some existing type alias tests with recursive aliases enabled [case testRecursiveAliasesMutual] +# flags: --disable-error-code used-before-def from typing import Type, Callable, Union A = Union[B, int] @@ -120,6 +121,7 @@ B = List[A] [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClass] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -131,6 +133,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClass2] +# flags: --disable-error-code used-before-def from typing import NewType, List x: D @@ -162,6 +165,7 @@ reveal_type(x) # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=_ [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClassImported] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import List @@ -376,6 +380,7 @@ x: A y: str = x[0] # E: Incompatible types in assignment (expression has type "Optional[A]", variable has type "str") [case testRecursiveAliasesProhibitBadAliases] +# flags: --disable-error-code used-before-def from typing import Union, Type, List, TypeVar NR = List[int] @@ -502,6 +507,8 @@ reveal_type(bnt.y) # N: Revealed type is "builtins.int" -- Tests duplicating some existing named tuple tests with recursive aliases enabled [case testMutuallyRecursiveNamedTuples] +# flags: --disable-error-code used-before-def + from typing import Tuple, NamedTuple, TypeVar, Union A = NamedTuple('A', [('x', str), ('y', Tuple[B, ...])]) @@ -565,6 +572,7 @@ t = m # E: Incompatible types in assignment (expression has type "B", variable [builtins fixtures/tuple.pyi] [case testMutuallyRecursiveNamedTuplesCalls] +# flags: --disable-error-code used-before-def from typing import NamedTuple B = NamedTuple('B', [('x', A), ('y', int)]) @@ -862,6 +870,7 @@ reveal_type(Sub) # N: Revealed type is "def [ValueT] (element: Union[ValueT`1, reveal_type(Sub(x)) # N: Revealed type is "__main__.Sub[typing.Iterable[builtins.str]]" [case testNoRecursiveExpandInstanceUnionCrashInference] +# flags: --disable-error-code used-before-def from typing import TypeVar, Union, Generic, List T = TypeVar("T") diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index b002746a3397..dd177e143aaa 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -428,7 +428,7 @@ class C: [case testSelfTypeNew] from typing import TypeVar, Type -T = TypeVar('T', bound=A) +T = TypeVar('T', bound='A') class A: def __new__(cls: Type[T]) -> T: return cls() @@ -998,7 +998,7 @@ reveal_type(ab.x) # N: Revealed type is "builtins.int" from typing import Generic, List, Optional, TypeVar, Any Q = TypeVar("Q") -T = TypeVar("T", bound=Super[Any]) +T = TypeVar("T", bound='Super[Any]') class Super(Generic[Q]): @classmethod @@ -1157,7 +1157,7 @@ from typing import Optional, Type, TypeVar, overload, Union Id = int -A = TypeVar("A", bound=AClass) +A = TypeVar("A", bound='AClass') class AClass: 
@overload @@ -1682,7 +1682,7 @@ class This: ... [case testTypingSelfAttrOldVsNewStyle] from typing import Self, TypeVar -T = TypeVar("T", bound=C) +T = TypeVar("T", bound='C') class C: x: Self def foo(self: T) -> T: diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 3450f8593d27..d1a2469efa56 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -140,20 +140,15 @@ main:5: error: Incompatible types in assignment (expression has type "bool", var main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testForStatement] +class A: pass a = None # type: A b = None # type: object for a in [A()]: - a = b # Fail + a = b # E: Incompatible types in assignment (expression has type "object", variable has type "A") else: - a = b # Fail - -class A: pass + a = b # E: Incompatible types in assignment (expression has type "object", variable has type "A") [builtins fixtures/list.pyi] -[out] -main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A") -main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A") - [case testBreakStatement] import typing while None: @@ -520,15 +515,15 @@ class B: pass main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [case testTypeErrorInBlock] +class A: pass +class B: pass while object: x = None # type: A if int(): x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass - [case testTypeErrorInvolvingBaseException] +class A: pass x, a = None, None # type: (BaseException, A) if int(): @@ -541,7 +536,6 @@ if int(): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "BaseException") if int(): x = BaseException() -class A: pass [builtins fixtures/exception.pyi] [case testSimpleTryExcept2] @@ -557,49 +551,38 @@ main:5: error: Incompatible types in assignment (expression has type "object", v [case testBaseClassAsExceptionTypeInExcept] import typing +class Err(BaseException): pass try: pass except Err as e: - e = BaseException() # Fail + e = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") e = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testMultipleExceptHandlers] import typing +class Err(BaseException): pass try: pass except BaseException as e: pass except Err as f: - f = BaseException() # Fail + f = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") f = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testTryExceptStatement] import typing +class A: pass +class B: pass +class Err(BaseException): pass try: - a = B() # type: A # Fail + a = B() # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") except BaseException as e: - e = A() # Fail + e = A() # E: Incompatible types in assignment (expression has type "A", variable has 
type "BaseException") e = Err() except Err as f: - f = BaseException() # Fail + f = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") f = Err() -class A: pass -class B: pass -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") -main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException") -main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testTryExceptWithinFunction] import typing def f() -> None: @@ -823,7 +806,7 @@ try: pass except E1 as e: pass try: pass except E2 as e: pass -e + 1 # E: Trying to read deleted variable "e" +e + 1 # E: Trying to read deleted variable "e" # E: Name "e" is used before definition e = E1() # E: Assignment to variable "e" outside except: block [builtins fixtures/exception.pyi] @@ -2052,16 +2035,12 @@ foo = int [case testTypeOfGlobalUsed] import typing +class A(): pass +class B(): pass g = A() def f() -> None: global g - g = B() - -class A(): pass -class B(): pass -[out] -main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") - + g = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeOfNonlocalUsed] import typing def f() -> None: diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index cdb27d10fe0c..535a8ae5007e 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -164,10 +164,10 @@ class C(B): pass [case testVoidValueInTuple] import typing +def f() -> None: pass + (None, f()) # E: "f" does not return a value (f(), None) # E: "f" does not return a value - -def f() -> None: pass [builtins fixtures/tuple.pyi] @@ -247,15 +247,16 @@ class B: pass [case testAssigningToTupleItems] from typing import Tuple + +class A: pass +class B: pass + t = None # type: Tuple[A, B] n = 0 t[0] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") t[2] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") t[n] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] @@ -532,13 +533,12 @@ if int(): [case testAssignmentToStarFromAny] from typing import Any, cast +class C: pass + a, c = cast(Any, 1), C() p, *q = a c = a c = q - -class C: pass - [case testAssignmentToComplexStar] from typing import List li = None # type: List[int] @@ -572,6 +572,7 @@ class A: pass [case testAssignmentToStarFromTupleInference] from typing import List +class A: pass li = None # type: List[int] la = None # type: List[A] a, *l = A(), A() @@ -579,13 +580,14 @@ if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la - -class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromListInference] from typing import List + +class A: pass + li = None # type: List[int] la = None # type: List[A] a, *l = [A(), A()] @@ -593,8 +595,6 @@ if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la - -class A: pass [builtins fixtures/list.pyi] [out] @@ -710,6 +710,9 @@ class C: pass [case testTupleErrorMessages] +class A: + def __add__(self, x: 'A') -> 'A': pass +def f(x: 'A') -> None: pass a = None 
# type: A @@ -717,11 +720,6 @@ a = None # type: A a + (a, a) # E: Unsupported operand types for + ("A" and "Tuple[A, A]") f((a, a)) # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A" (a, a).foo # E: "Tuple[A, A]" has no attribute "foo" - -def f(x: 'A') -> None: pass - -class A: - def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/tuple.pyi] [case testLargeTuplesInErrorMessages] diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index e5d9bf94873a..d7cccd2d6ba6 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -206,7 +206,7 @@ B = Callable[[B], int] # E: Cannot resolve name "B" (possible cyclic definition) C = Type[C] # E: Cannot resolve name "C" (possible cyclic definition) [case testRecursiveAliasesErrors2] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code=used-before-def # Recursive aliases are not supported yet. from typing import Type, Callable, Union @@ -224,6 +224,7 @@ main:7: error: Cannot resolve name "C" (possible cyclic definition) main:9: note: Revealed type is "Union[Any, builtins.int]" [case testDoubleForwardAlias] +# flags: --disable-error-code=used-before-def from typing import List x: A A = List[B] @@ -233,6 +234,7 @@ reveal_type(x) # N: Revealed type is "builtins.list[builtins.list[builtins.int]] [out] [case testDoubleForwardAliasWithNamedTuple] +# flags: --disable-error-code=used-before-def from typing import List, NamedTuple x: A A = List[B] @@ -254,6 +256,7 @@ if isinstance(x, list): [out] [case testForwardRefToTypeVar] +# flags: --disable-error-code=used-before-def from typing import TypeVar, List reveal_type(a) # N: Revealed type is "builtins.list[builtins.int]" @@ -444,7 +447,7 @@ A = Union[None] [case testAliasToClassMethod] from typing import TypeVar, Generic, Union, Type -T = TypeVar('T', bound=C) +T = TypeVar('T', bound='C') MYPY = False if MYPY: diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index d277fa441b1e..e426b8a7630b 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -1494,7 +1494,7 @@ class G(Generic[T]): yb: G[int] # E: Type argument "int" of "G" must be a subtype of "M" yg: G[M] -z: int = G[M]().x['x'] +z: int = G[M]().x['x'] # type: ignore[used-before-def] class M(TypedDict): x: int @@ -2279,7 +2279,7 @@ reveal_type(foo['baz']) # N: Revealed type is "builtins.list[Any]" from mypy_extensions import TypedDict from typing import Any, List -Foo = TypedDict('Foo', {'bar': Bar, 'baz': Bar}) +Foo = TypedDict('Foo', {'bar': 'Bar', 'baz': 'Bar'}) Bar = List[Any] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 00ac7df320d2..d598fe13b7e9 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -38,6 +38,13 @@ def test(*t: type) -> None: [case testCallingVarArgsFunction] +def f( *a: 'A') -> None: pass + +def g() -> None: pass + +class A: pass +class B(A): pass +class C: pass a = None # type: A b = None # type: B @@ -51,17 +58,14 @@ f() f(a) f(b) f(a, b, a, b) +[builtins fixtures/list.pyi] -def f( *a: 'A') -> None: pass - -def g() -> None: pass +[case testCallingVarArgsFunctionWithAlsoNormalArgs] +def f(a: 'C', *b: 'A') -> None: pass class A: pass class B(A): pass class C: pass -[builtins fixtures/list.pyi] - -[case testCallingVarArgsFunctionWithAlsoNormalArgs] a = None # type: A b = None # type: B @@ -73,16 +77,16 @@ f(c, a, b, c) # E: Argument 
4 to "f" has incompatible type "C"; expected "A" f(c) f(c, a) f(c, b, b, a, b) +[builtins fixtures/list.pyi] -def f(a: 'C', *b: 'A') -> None: pass +[case testCallingVarArgsFunctionWithDefaultArgs] +# flags: --implicit-optional --no-strict-optional +def f(a: 'C' = None, *b: 'A') -> None: + pass class A: pass class B(A): pass class C: pass -[builtins fixtures/list.pyi] - -[case testCallingVarArgsFunctionWithDefaultArgs] -# flags: --implicit-optional --no-strict-optional a = None # type: A b = None # type: B @@ -95,13 +99,6 @@ f() f(c) f(c, a) f(c, b, b, a, b) - -def f(a: 'C' = None, *b: 'A') -> None: - pass - -class A: pass -class B(A): pass -class C: pass [builtins fixtures/list.pyi] [case testCallVarargsFunctionWithIterable] @@ -156,6 +153,14 @@ f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [case testTypeInferenceWithCalleeVarArgs] from typing import TypeVar T = TypeVar('T') + +def f( *a: T) -> T: + pass + +class A: pass +class B(A): pass +class C: pass + a = None # type: A b = None # type: B c = None # type: C @@ -180,13 +185,6 @@ if int(): o = f(a, b, o) if int(): c = f(c) - -def f( *a: T) -> T: - pass - -class A: pass -class B(A): pass -class C: pass [builtins fixtures/list.pyi] [case testTypeInferenceWithCalleeVarArgsAndDefaultArgs] @@ -195,6 +193,11 @@ T = TypeVar('T') a = None # type: A o = None # type: object +def f(a: T, b: T = None, *c: T) -> T: + pass + +class A: pass + if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): @@ -210,11 +213,6 @@ if int(): a = f(a, a) if int(): a = f(a, a, a) - -def f(a: T, b: T = None, *c: T) -> T: - pass - -class A: pass [builtins fixtures/list.pyi] @@ -224,27 +222,31 @@ class A: pass [case testCallingWithListVarArgs] from typing import List, Any, cast + +def f(a: 'A', b: 'B') -> None: + pass + +class A: pass +class B: pass + aa = None # type: List[A] ab = None # type: List[B] a = None # type: A b = None # type: B -f(*aa) # Fail +f(*aa) # E: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" f(a, *ab) # Ok f(a, b) (cast(Any, f))(*aa) # IDEA: Move to check-dynamic? (cast(Any, f))(a, *ab) # IDEA: Move to check-dynamic? 
- -def f(a: 'A', b: 'B') -> None: - pass +[builtins fixtures/list.pyi] +[case testCallingWithTupleVarArgs] +def f(a: 'A', b: 'B', c: 'C') -> None: pass class A: pass class B: pass -[builtins fixtures/list.pyi] -[out] -main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" - -[case testCallingWithTupleVarArgs] +class C: pass +class CC(C): pass a = None # type: A b = None # type: B @@ -262,27 +264,20 @@ f(*(a, b, c)) f(a, *(b, c)) f(a, b, *(c,)) f(a, *(b, cc)) - -def f(a: 'A', b: 'B', c: 'C') -> None: pass - -class A: pass -class B: pass -class C: pass -class CC(C): pass [builtins fixtures/tuple.pyi] [case testInvalidVarArg] +def f(a: 'A') -> None: + pass + +class A: pass + a = None # type: A f(*None) f(*a) # E: List or tuple expected as variadic arguments f(*(a,)) - -def f(a: 'A') -> None: - pass - -class A: pass [builtins fixtures/tuple.pyi] @@ -292,34 +287,33 @@ class A: pass [case testCallingVarArgsFunctionWithListVarArgs] from typing import List + +def f(a: 'A', *b: 'B') -> None: pass +def g(a: 'A', *b: 'A') -> None: pass +class A: pass +class B: pass + aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B) -f(*aa) # Fail -f(a, *aa) # Fail -f(b, *ab) # Fail -f(a, a, *ab) # Fail -f(a, b, *aa) # Fail -f(b, b, *ab) # Fail -g(*ab) # Fail +f(*aa) # E: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" +f(a, *aa) # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" +f(b, *ab) # E: Argument 1 to "f" has incompatible type "B"; expected "A" +f(a, a, *ab) # E: Argument 2 to "f" has incompatible type "A"; expected "B" +f(a, b, *aa) # E: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" +f(b, b, *ab) # E: Argument 1 to "f" has incompatible type "B"; expected "A" +g(*ab) # E: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" f(a, *ab) f(a, b, *ab) f(a, b, b, *ab) g(*aa) +[builtins fixtures/list.pyi] +[case testCallingVarArgsFunctionWithTupleVarArgs] +def f(a: 'A', *b: 'B') -> None: + pass -def f(a: 'A', *b: 'B') -> None: pass -def g(a: 'A', *b: 'A') -> None: pass class A: pass class B: pass -[builtins fixtures/list.pyi] -[out] -main:3: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" -main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" -main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A" -main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B" -main:7: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" -main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A" -main:9: error: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" - -[case testCallingVarArgsFunctionWithTupleVarArgs] +class C: pass +class CC(C): pass a, b, c, cc = None, None, None, None # type: (A, B, C, CC) @@ -335,14 +329,6 @@ f(*()) # E: Too few arguments for "f" f(*(a, b, b)) f(a, *(b, b)) f(a, b, *(b,)) - -def f(a: 'A', *b: 'B') -> None: - pass - -class A: pass -class B: pass -class C: pass -class CC(C): pass [builtins fixtures/list.pyi] @@ -352,32 +338,21 @@ class CC(C): pass [case testDynamicVarArg] from typing import Any +def f(a: 'A') -> None: pass +def g(a: 'A', *b: 'A') -> None: pass +class A: pass + d, a = None, None # type: (Any, A) -f(a, a, *d) # Fail +f(a, a, *d) # E: Too many arguments for "f" f(a, *d) # Ok f(*d) # Ok g(*d) g(a, *d) g(a, a, *d) - -def f(a: 'A') -> None: pass -def g(a: 'A', *b: 'A') -> None: pass -class A: pass [builtins fixtures/list.pyi] -[out] -main:3: error: Too many 
arguments for "f" - [case testListVarArgsAndSubtyping] from typing import List -aa = None # type: List[A] -ab = None # type: List[B] - -g(*aa) # E: Argument 1 to "g" has incompatible type "*List[A]"; expected "B" -f(*aa) -f(*ab) -g(*ab) - def f( *a: 'A') -> None: pass @@ -386,11 +361,25 @@ def g( *a: 'B') -> None: class A: pass class B(A): pass + +aa = None # type: List[A] +ab = None # type: List[B] + +g(*aa) # E: Argument 1 to "g" has incompatible type "*List[A]"; expected "B" +f(*aa) +f(*ab) +g(*ab) [builtins fixtures/list.pyi] [case testCallerVarArgsAndDefaultArgs] # flags: --implicit-optional --no-strict-optional +def f(a: 'A', b: 'B' = None, *c: 'B') -> None: + pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) f(*()) # E: Too few arguments for "f" f(a, *[a]) # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "Optional[B]" \ @@ -403,12 +392,6 @@ f(*(a, b, b, b)) f(a, *[]) f(a, *[b]) f(a, *[b, b]) - -def f(a: 'A', b: 'B' = None, *c: 'B') -> None: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testVarArgsAfterKeywordArgInCall1] @@ -528,6 +511,13 @@ def f(a: B, *b: B) -> B: pass from typing import List, TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') + +def f(a: S, *b: T) -> Tuple[S, T]: + pass + +class A: pass +class B: pass + a, b, aa = None, None, None # type: (A, B, List[A]) if int(): @@ -551,18 +541,18 @@ if int(): b, a = f(b, *aa) if int(): b, a = f(b, a, *aa) - -def f(a: S, *b: T) -> Tuple[S, T]: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerVarArgsTupleWithTypeInference] from typing import TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') + +def f(a: S, b: T) -> Tuple[S, T]: pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) if int(): @@ -579,11 +569,6 @@ if int(): a, b = f(*(a, b)) if int(): a, b = f(a, *(b,)) - -def f(a: S, b: T) -> Tuple[S, T]: pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerVarargsAndComplexTypeInference] @@ -595,6 +580,13 @@ ao = None # type: List[object] aa = None # type: List[A] ab = None # type: List[B] +class G(Generic[T]): + def f(self, *a: S) -> Tuple[List[S], List[T]]: + pass + +class A: pass +class B: pass + if int(): a, aa = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \ @@ -621,13 +613,6 @@ if int(): # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant - -class G(Generic[T]): - def f(self, *a: S) -> Tuple[List[S], List[T]]: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerTupleVarArgsAndGenericCalleeVarArg] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 5a7f21d48c20..d4b2d3469871 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -5525,11 +5525,13 @@ a.py:5: error: Argument 1 to "f" has incompatible type "C"; expected "int" import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5542,25 +5544,27 @@ def T() -> None: pass [out] == -main:4: error: "C" expects no type arguments, but 1 given -main:4: error: Function "a.T" is not valid as a type -main:4: note: Perhaps you need 
"Callable[...]" or a callback protocol? -main:6: error: Free type variable expected in Generic[...] -main:7: error: Function "a.T" is not valid as a type -main:7: note: Perhaps you need "Callable[...]" or a callback protocol? -main:10: error: Function "a.T" is not valid as a type -main:10: note: Perhaps you need "Callable[...]" or a callback protocol? -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:6: error: Function "a.T" is not valid as a type +main:6: note: Perhaps you need "Callable[...]" or a callback protocol? +main:9: error: "C" expects no type arguments, but 1 given +main:9: error: Function "a.T" is not valid as a type +main:9: note: Perhaps you need "Callable[...]" or a callback protocol? +main:12: error: Function "a.T" is not valid as a type +main:12: note: Perhaps you need "Callable[...]" or a callback protocol? +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeVarToModule] import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5574,15 +5578,15 @@ import T [out] == == -main:4: error: "C" expects no type arguments, but 1 given -main:4: error: Module "T" is not valid as a type -main:4: note: Perhaps you meant to use a protocol matching the module structure? -main:6: error: Free type variable expected in Generic[...] -main:7: error: Module "T" is not valid as a type -main:7: note: Perhaps you meant to use a protocol matching the module structure? -main:10: error: Module "T" is not valid as a type -main:10: note: Perhaps you meant to use a protocol matching the module structure? -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:6: error: Module "T" is not valid as a type +main:6: note: Perhaps you meant to use a protocol matching the module structure? +main:9: error: "C" expects no type arguments, but 1 given +main:9: error: Module "T" is not valid as a type +main:9: note: Perhaps you meant to use a protocol matching the module structure? +main:12: error: Module "T" is not valid as a type +main:12: note: Perhaps you meant to use a protocol matching the module structure? +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeClassToModule] @@ -5614,11 +5618,13 @@ main:8: note: Perhaps you meant to use a protocol matching the module structure? import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5630,9 +5636,9 @@ from typing import TypeVar T = int [out] == -main:4: error: "C" expects no type arguments, but 1 given -main:6: error: Free type variable expected in Generic[...] -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] 
+main:9: error: "C" expects no type arguments, but 1 given +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeAliasToModule] @@ -8201,6 +8207,7 @@ x = 1 == [case testIdLikeDecoForwardCrashAlias] +# flags: --disable-error-code used-before-def import b [file b.py] from typing import Callable, Any, TypeVar diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test index 870c686807c3..20443517e03e 100644 --- a/test-data/unit/semanal-basic.test +++ b/test-data/unit/semanal-basic.test @@ -50,26 +50,27 @@ MypyFile:1( Args()))) [case testAccessingGlobalNameBeforeDefinition] +# flags: --disable-error-code used-before-def x f() x = 1 def f(): pass [out] MypyFile:1( - ExpressionStmt:1( - NameExpr(x [__main__.x])) ExpressionStmt:2( - CallExpr:2( + NameExpr(x [__main__.x])) + ExpressionStmt:3( + CallExpr:3( NameExpr(f [__main__.f]) Args())) - AssignmentStmt:3( + AssignmentStmt:4( NameExpr(x [__main__.x]) IntExpr(1) builtins.int) - FuncDef:4( + FuncDef:5( f - Block:4( - PassStmt:4()))) + Block:5( + PassStmt:5()))) [case testFunctionArgs] def f(x, y): diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test index 27ff101c04d0..013452068cf1 100644 --- a/test-data/unit/semanal-statements.test +++ b/test-data/unit/semanal-statements.test @@ -789,6 +789,7 @@ MypyFile:1( Args()))))) [case testTryExceptWithMultipleHandlers] +class Err(BaseException): pass try: pass except BaseException as e: @@ -796,36 +797,34 @@ except BaseException as e: except Err as f: f = BaseException() # Fail f = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] MypyFile:1( - TryStmt:1( - Block:1( - PassStmt:2()) + ClassDef:1( + Err + BaseType( + builtins.BaseException) + PassStmt:1()) + TryStmt:2( + Block:2( + PassStmt:3()) NameExpr(BaseException [builtins.BaseException]) NameExpr(e* [__main__.e]) - Block:3( - PassStmt:4()) + Block:4( + PassStmt:5()) NameExpr(Err [__main__.Err]) NameExpr(f* [__main__.f]) - Block:5( - AssignmentStmt:6( + Block:6( + AssignmentStmt:7( NameExpr(f [__main__.f]) - CallExpr:6( + CallExpr:7( NameExpr(BaseException [builtins.BaseException]) Args())) - AssignmentStmt:7( + AssignmentStmt:8( NameExpr(f [__main__.f]) - CallExpr:7( + CallExpr:8( NameExpr(Err [__main__.Err]) - Args())))) - ClassDef:8( - Err - BaseType( - builtins.BaseException) - PassStmt:8())) - + Args()))))) [case testMultipleAssignmentWithPartialNewDef] # flags: --allow-redefinition o = None diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index 5cbdf38d1b4f..26caef0d6dde 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -21,15 +21,15 @@ [case testConstructorCall] import typing -A() -B() class A: pass class B: pass +A() +B() [out] -CallExpr(2) : A -NameExpr(2) : def () -> A -CallExpr(3) : B -NameExpr(3) : def () -> B +CallExpr(4) : A +NameExpr(4) : def () -> A +CallExpr(5) : B +NameExpr(5) : def () -> B [case testLiterals] import typing @@ -202,17 +202,17 @@ UnaryExpr(6) : builtins.bool [case testFunctionCall] ## CallExpr from typing import Tuple -f( - A(), - B()) class A: pass class B: pass def f(a: A, b: B) -> Tuple[A, B]: pass +f( + A(), + B()) [builtins fixtures/tuple-simple.pyi] [out] -CallExpr(3) : Tuple[A, B] -CallExpr(4) : A -CallExpr(5) : B +CallExpr(6) : Tuple[A, B] +CallExpr(7) : A +CallExpr(8) : B -- Statements @@ -602,28 +602,26 @@ NameExpr(4) : def [t] (x: t`-1) -> t`-1 ## CallExpr from typing import TypeVar, Generic T 
= TypeVar('T') -f(g()) -f(h(b)) -f(h(c)) - -b = None # type: B -c = None # type: C - +class A(Generic[T]): pass +class B: pass +class C(B): pass def f(a: 'A[B]') -> None: pass - def g() -> 'A[T]': pass def h(a: T) -> 'A[T]': pass -class A(Generic[T]): pass -class B: pass -class C(B): pass +b = None # type: B +c = None # type: C + +f(g()) +f(h(b)) +f(h(c)) [out] -CallExpr(4) : None -CallExpr(4) : A[B] -CallExpr(5) : None -CallExpr(5) : A[B] -CallExpr(6) : None -CallExpr(6) : A[B] +CallExpr(14) : None +CallExpr(14) : A[B] +CallExpr(15) : None +CallExpr(15) : A[B] +CallExpr(16) : None +CallExpr(16) : A[B] [case testInferGenericTypeForLocalVariable] from typing import TypeVar, Generic @@ -697,21 +695,21 @@ ListExpr(2) : builtins.list[Any] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -map( - f, - [A()]) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +map( + f, + [A()]) [builtins fixtures/list.pyi] [out] -CallExpr(4) : builtins.list[B] -NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -NameExpr(5) : def (a: A) -> B -CallExpr(6) : A -ListExpr(6) : builtins.list[A] -NameExpr(6) : def () -> A +CallExpr(8) : builtins.list[B] +NameExpr(8) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +NameExpr(9) : def (a: A) -> B +CallExpr(10) : A +ListExpr(10) : builtins.list[A] +NameExpr(10) : def () -> A -- Lambdas @@ -761,106 +759,106 @@ ListExpr(2) : builtins.list[A] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: f(x), l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +l = None # type: List[A] +map( + lambda x: f(x), l) [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -CallExpr(6) : B -LambdaExpr(6) : def (A) -> B -NameExpr(6) : def (a: A) -> B -NameExpr(6) : builtins.list[A] -NameExpr(6) : A +CallExpr(9) : builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +CallExpr(10) : B +LambdaExpr(10) : def (A) -> B +NameExpr(10) : def (a: A) -> B +NameExpr(10) : builtins.list[A] +NameExpr(10) : A [case testLambdaAndHigherOrderFunction2] ## LambdaExpr|NameExpr|ListExpr from typing import TypeVar, List, Callable t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: [f(x)], l) def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +l = None # type: List[A] +map( + lambda x: [f(x)], l) [builtins fixtures/list.pyi] [out] -NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(7) : def (A) -> builtins.list[B] -ListExpr(7) : builtins.list[B] -NameExpr(7) : def (a: A) -> B -NameExpr(7) : builtins.list[A] -NameExpr(7) : A +NameExpr(10) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] +LambdaExpr(11) : def (A) -> builtins.list[B] +ListExpr(11) : builtins.list[B] +NameExpr(11) : def (a: A) -> B +NameExpr(11) : builtins.list[A] +NameExpr(11) : A [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') +def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass +class A: pass l = None # type: List[A] map( [lambda x: x], l) -def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass -class 
A: pass [builtins fixtures/list.pyi] [out] -- TODO We probably should not silently infer 'Any' types in statically typed -- context. Perhaps just fail instead? -CallExpr(5) : builtins.list[Any] -NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] -LambdaExpr(6) : def (A) -> A -ListExpr(6) : builtins.list[def (A) -> Any] -NameExpr(6) : A -NameExpr(7) : builtins.list[A] +CallExpr(7) : builtins.list[Any] +NameExpr(7) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] +LambdaExpr(8) : def (A) -> A +ListExpr(8) : builtins.list[def (A) -> Any] +NameExpr(8) : A +NameExpr(9) : builtins.list[A] [case testLambdaAndHigherOrderFunction3] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: x.b, - l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: b = None # type: B class B: pass +l = None # type: List[A] +map( + lambda x: x.b, + l) [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(6) : def (A) -> B -MemberExpr(6) : B -NameExpr(6) : A -NameExpr(7) : builtins.list[A] +CallExpr(9) : builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +LambdaExpr(10) : def (A) -> B +MemberExpr(10) : B +NameExpr(10) : A +NameExpr(11) : builtins.list[A] [case testLambdaAndHigherOrderFunctionAndKeywordArgs] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') +def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass +class A: + b = None # type: B +class B: pass l = None # type: List[A] map( a=l, f=lambda x: x.b) -def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass -class A: - b = None # type: B -class B: pass [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -NameExpr(6) : builtins.list[A] -LambdaExpr(7) : def (A) -> B -MemberExpr(7) : B -NameExpr(7) : A +CallExpr(9) : builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +NameExpr(10) : builtins.list[A] +LambdaExpr(11) : def (A) -> B +MemberExpr(11) : B +NameExpr(11) : A -- Boolean operations From 9a8c171ed81c0c9510386f7a0451682167021d02 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 20 Jan 2023 16:54:22 +0000 Subject: [PATCH 189/292] Fix false positive on generic base class with six (#14478) Fixes #14475 The fix is straightforward. We need to use the "guarded accept" at this stage, similar to e.g. `clean_up_bases_and_infer_type_variables()`. 
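For illustration, here is a minimal self-contained version of the pattern this fixes. It mirrors the new testSixMetaclassGenericBase test case added in this patch; the imports are only included to make the snippet executable on its own, and the class names C/D follow the test:

    import abc
    from typing import Generic, TypeVar

    import six

    T = TypeVar("T")

    # Before this change mypy reported a false positive on the generic
    # base class produced by six.with_metaclass(); with the guarded
    # accept both definitions are analyzed cleanly.
    class C(six.with_metaclass(abc.ABCMeta, Generic[T])):
        pass

    class D(six.with_metaclass(abc.ABCMeta, C[T])):
        pass
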
--- mypy/semanal.py | 2 +- test-data/unit/check-classes.test | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index acc485a609e0..176a9e4053a8 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2181,7 +2181,7 @@ def infer_metaclass_and_bases_from_compat_helpers(self, defn: ClassDef) -> None: if len(defn.base_type_exprs) == 1: base_expr = defn.base_type_exprs[0] if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr): - base_expr.accept(self) + self.analyze_type_expr(base_expr) if ( base_expr.callee.fullname in { diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index fce1aa1768f9..9a38d8f344f7 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -5293,6 +5293,19 @@ class F(six.with_metaclass(t.M)): pass class G: pass [builtins fixtures/tuple.pyi] +[case testSixMetaclassGenericBase] +import six +import abc +from typing import TypeVar, Generic + +T = TypeVar("T") + +class C(six.with_metaclass(abc.ABCMeta, Generic[T])): + pass +class D(six.with_metaclass(abc.ABCMeta, C[T])): + pass +[builtins fixtures/tuple.pyi] + -- Special support for future.utils -- -------------------------------- From 8f4da0e6f07a15a1d4acae98a588796c8ad81fb6 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 20 Jan 2023 14:12:49 -0800 Subject: [PATCH 190/292] Fix incorrect join in the presence of Any fallback (#14404) Fixes #11925 This avoids mypy from guessing subtype relationships because of the any fallback over here: https://github.com/python/mypy/blob/e1bfb75ed2187db76d51ed875ce953da3ba4d02c/mypy/subtypes.py#L438 --- mypy/join.py | 5 ++++- mypy/subtypes.py | 6 +----- test-data/unit/check-inference.test | 12 ++++++++++++ 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/mypy/join.py b/mypy/join.py index 84aa03f8eeba..62d256f4440f 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -9,6 +9,7 @@ from mypy.nodes import CONTRAVARIANT, COVARIANT, INVARIANT from mypy.state import state from mypy.subtypes import ( + SubtypeContext, find_member, is_equivalent, is_proper_subtype, @@ -101,7 +102,9 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType: assert new_type is not None args.append(new_type) result: ProperType = Instance(t.type, args) - elif t.type.bases and is_subtype(t, s, ignore_type_params=True): + elif t.type.bases and is_proper_subtype( + t, s, subtype_context=SubtypeContext(ignore_type_params=True) + ): result = self.join_instances_via_supertype(t, s) else: # Now t is not a subtype of s, and t != s. Now s could be a subtype diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 83cb22d48fab..0214e7ae308a 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -103,11 +103,7 @@ def check_context(self, proper_subtype: bool) -> None: # Historically proper and non-proper subtypes were defined using different helpers # and different visitors. Check if flag values are such that we definitely support. 
if proper_subtype: - assert ( - not self.ignore_type_params - and not self.ignore_pos_arg_names - and not self.ignore_declared_variance - ) + assert not self.ignore_pos_arg_names and not self.ignore_declared_variance else: assert not self.erase_instances and not self.keep_erased_types diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 41fe942b8339..331b110fded6 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3380,3 +3380,15 @@ class A: T = TypeVar("T") def type_or_callable(value: T, tp: Union[Type[T], Callable[[int], T]]) -> T: ... reveal_type(type_or_callable(A("test"), A)) # N: Revealed type is "__main__.A" + +[case testJoinWithAnyFallback] +from unknown import X # type: ignore[import] + +class A: ... +class B(X, A): ... +class C(B): ... +class D(C): ... +class E(D): ... + +reveal_type([E(), D()]) # N: Revealed type is "builtins.list[__main__.D]" +reveal_type([D(), E()]) # N: Revealed type is "builtins.list[__main__.D]" From 272c8fd4948fcae9ae91430fbb5623d341e87ddd Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 21 Jan 2023 14:06:48 +0000 Subject: [PATCH 191/292] Fix recursive TypedDicts/NamedTuples defined with call syntax (#14488) Fixes #14460 Recursive TypedDicts/NamedTuples defined with call syntax that have item types that look like type applications suffer the same chicken-and-egg problem that recursive type aliases. Fortunately, there is a very simple way to distinguish them without fully analyzing rvalues, this is what this PR does. --- mypy/semanal.py | 17 ++++++++++------- test-data/unit/check-recursive-types.test | 17 +++++++++++++++++ 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 176a9e4053a8..d7bf60501b36 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2648,7 +2648,7 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: # But we can't use a full visit because it may emit extra incomplete refs (namely # when analysing any type applications there) thus preventing the further analysis. # To break the tie, we first analyse rvalue partially, if it can be a type alias. - if self.can_possibly_be_index_alias(s): + if self.can_possibly_be_type_form(s): old_basic_type_applications = self.basic_type_applications self.basic_type_applications = True with self.allow_unbound_tvars_set(): @@ -2664,7 +2664,7 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: for expr in names_modified_by_assignment(s): self.mark_incomplete(expr.name, expr) return - if self.can_possibly_be_index_alias(s): + if self.can_possibly_be_type_form(s): # Now re-visit those rvalues that were we skipped type applications above. # This should be safe as generally semantic analyzer is idempotent. with self.allow_unbound_tvars_set(): @@ -2807,16 +2807,19 @@ def can_be_type_alias(self, rv: Expression, allow_none: bool = False) -> bool: return True return False - def can_possibly_be_index_alias(self, s: AssignmentStmt) -> bool: - """Like can_be_type_alias(), but simpler and doesn't require analyzed rvalue. + def can_possibly_be_type_form(self, s: AssignmentStmt) -> bool: + """Like can_be_type_alias(), but simpler and doesn't require fully analyzed rvalue. - Instead, use lvalues/annotations structure to figure out whether this can - potentially be a type alias definition. 
Another difference from above function - is that we are only interested IndexExpr and OpExpr rvalues, since only those + Instead, use lvalues/annotations structure to figure out whether this can potentially be + a type alias definition, NamedTuple, or TypedDict. Another difference from above function + is that we are only interested IndexExpr, CallExpr and OpExpr rvalues, since only those can be potentially recursive (things like `A = A` are never valid). """ if len(s.lvalues) > 1: return False + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr): + ref = s.rvalue.callee.fullname + return ref in TPDICT_NAMES or ref in TYPED_NAMEDTUPLE_NAMES if not isinstance(s.lvalues[0], NameExpr): return False if s.unanalyzed_type is not None and not self.is_pep_613(s): diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index 53811521f442..b7b4372ecc12 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -880,3 +880,20 @@ class InListRecurse(Generic[T], List[InList[T]]): ... def list_thing(transforming: InList[T]) -> T: ... reveal_type(list_thing([5])) # N: Revealed type is "builtins.list[builtins.int]" + +[case testRecursiveTypedDictWithList] +from typing import List +from typing_extensions import TypedDict + +Example = TypedDict("Example", {"rec": List["Example"]}) +e: Example +reveal_type(e) # N: Revealed type is "TypedDict('__main__.Example', {'rec': builtins.list[...]})" +[builtins fixtures/dict.pyi] + +[case testRecursiveNamedTupleWithList] +from typing import List, NamedTuple + +Example = NamedTuple("Example", [("rec", List["Example"])]) +e: Example +reveal_type(e) # N: Revealed type is "Tuple[builtins.list[...], fallback=__main__.Example]" +[builtins fixtures/tuple.pyi] From eea917ee5c060569fdc9326b30eb0a38ea6f977a Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 21 Jan 2023 14:35:32 +0000 Subject: [PATCH 192/292] Fix crash in await inside comprehension outside function (#14486) Fixes #14345 I also decided to make this error a blocker, since it is essentially a syntax error. (And also a similar error for `yield` is a blocker). --- mypy/semanal.py | 7 ++++--- test-data/unit/check-async-await.test | 12 ++++++++++++ 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index d7bf60501b36..f42eee28517e 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -5166,10 +5166,11 @@ def visit_yield_expr(self, e: YieldExpr) -> None: e.expr.accept(self) def visit_await_expr(self, expr: AwaitExpr) -> None: - if not self.is_func_scope(): - self.fail('"await" outside function', expr) + if not self.is_func_scope() or not self.function_stack: + # We check both because is_function_scope() returns True inside comprehensions. + self.fail('"await" outside function', expr, serious=True, blocker=True) elif not self.function_stack[-1].is_coroutine: - self.fail('"await" outside coroutine ("async def")', expr) + self.fail('"await" outside coroutine ("async def")', expr, serious=True, blocker=True) expr.expr.accept(self) # diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index d53cba2fc642..40efe2d2cece 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -943,3 +943,15 @@ async def bar(x: Union[A, B]) -> None: [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case testInvalidComprehensionNoCrash] +async def foo(x: int) -> int: ... 
+ +crasher = [await foo(x) for x in [1, 2, 3]] # E: "await" outside function + +def bad() -> None: + y = [await foo(x) for x in [1, 2, 3]] # E: "await" outside coroutine ("async def") +async def good() -> None: + y = [await foo(x) for x in [1, 2, 3]] # OK +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] From af895641ff395c7e78d012f0a9b995b8ed016c17 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 21 Jan 2023 16:04:48 +0000 Subject: [PATCH 193/292] [mypyc] Add irbuild test case for cast(i64, ...) (#14493) This tests already supported functionality to detect regressions. --- mypyc/test-data/irbuild-i64.test | 41 ++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index c6b62996bc80..2c4052fa4796 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1730,3 +1730,44 @@ L0: def f5(): L0: return 4 + +[case testI64Cast] +from typing import cast +from mypy_extensions import i64 + +def cast_object(o: object) -> i64: + return cast(i64, o) + +def cast_int(x: int) -> i64: + return cast(i64, x) +[out] +def cast_object(o): + o :: object + r0 :: int64 +L0: + r0 = unbox(int64, o) + return r0 +def cast_int(x): + x :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive x +L3: + return r3 From cc1bcc9c35ed018d59596f6d75a70a5d8b8c1805 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 21 Jan 2023 16:19:03 +0000 Subject: [PATCH 194/292] Properly expand type in generic class with Self and TypeVar with values (#14491) Fixes #14374 It looks like we need to special-case `Self` in `expand_type()` to support it in generics over `TypeVar` with values, since `Self` is the only type variable that can legitimately have other type variables in its upper bound. --- mypy/expandtype.py | 4 ++++ test-data/unit/check-selftype.test | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 203c71b4e824..7933283b24d6 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -222,6 +222,10 @@ def visit_instance(self, t: Instance) -> Type: return args def visit_type_var(self, t: TypeVarType) -> Type: + # Normally upper bounds can't contain other type variables, the only exception is + # special type variable Self`0 <: C[T, S], where C is the class where Self is used. + if t.id.raw_id == 0: + t = t.copy_modified(upper_bound=t.upper_bound.accept(self)) repl = self.variables.get(t.id, t) if isinstance(repl, ProperType) and isinstance(repl, Instance): # TODO: do we really need to do this? diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index dd177e143aaa..2d45d28764a0 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1785,3 +1785,23 @@ class C(B, Generic[T]): inst = super().copy() reveal_type(inst) # N: Revealed type is "Self`0" return inst + +[case testTypingSelfWithValuesExpansion] +from typing import Self, Generic, TypeVar + +class A: pass +class B: pass +T = TypeVar("T", A, B) + +class C(Generic[T]): + val: T + def foo(self, x: T) -> None: ... 
+ def bar(self, x: T) -> Self: + reveal_type(self.foo) # N: Revealed type is "def (x: __main__.A)" \ + # N: Revealed type is "def (x: __main__.B)" + self.foo(x) + return self + def baz(self: Self, x: T) -> None: + reveal_type(self.val) # N: Revealed type is "__main__.A" \ + # N: Revealed type is "__main__.B" + self.val = x From e8c844b613ff95a41aeba63096aecbde80b78d99 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 22 Jan 2023 11:18:05 +0000 Subject: [PATCH 195/292] Fix crash on Any metaclass in incremental mode (#14495) Fixes #14254 This essentially re-implements https://github.com/python/mypy/pull/13605 in a simpler way that also works in incremental mode. Also I decided to set `meta_fallback_to_any` in case of errors, to match how we do this for base classes. --- mypy/checkmember.py | 2 +- mypy/nodes.py | 7 +++++ mypy/semanal.py | 40 ++++++++++++++++----------- mypy/server/astdiff.py | 1 + mypy/subtypes.py | 2 +- test-data/unit/check-classes.test | 10 +++++-- test-data/unit/check-incremental.test | 11 ++++++++ test-data/unit/fine-grained.test | 2 -- 8 files changed, 52 insertions(+), 23 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 918ce7520454..f90a4f706a87 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -901,7 +901,7 @@ def analyze_class_attribute_access( # For modules use direct symbol table lookup. if not itype.extra_attrs.mod_name: return itype.extra_attrs.attrs[name] - if info.fallback_to_any: + if info.fallback_to_any or info.meta_fallback_to_any: return apply_class_attr_hook(mx, hook, AnyType(TypeOfAny.special_form)) return None diff --git a/mypy/nodes.py b/mypy/nodes.py index 4a4de9d4503d..38639d553b3d 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2800,6 +2800,7 @@ class is generic then it will be a type constructor of higher kind. "inferring", "is_enum", "fallback_to_any", + "meta_fallback_to_any", "type_vars", "has_param_spec_type", "bases", @@ -2894,6 +2895,10 @@ class is generic then it will be a type constructor of higher kind. # (and __setattr__), but without the __getattr__ method. fallback_to_any: bool + # Same as above but for cases where metaclass has type Any. This will suppress + # all attribute errors only for *class object* access. + meta_fallback_to_any: bool + # Information related to type annotations. # Generic type variable names (full names) @@ -2963,6 +2968,7 @@ class is generic then it will be a type constructor of higher kind. "is_abstract", "is_enum", "fallback_to_any", + "meta_fallback_to_any", "is_named_tuple", "is_newtype", "is_protocol", @@ -3002,6 +3008,7 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None self.is_final = False self.is_enum = False self.fallback_to_any = False + self.meta_fallback_to_any = False self._promote = [] self.alt_promote = None self.tuple_type = None diff --git a/mypy/semanal.py b/mypy/semanal.py index f42eee28517e..5653aa4547c4 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1579,7 +1579,9 @@ def analyze_class(self, defn: ClassDef) -> None: self.mark_incomplete(defn.name, defn) return - declared_metaclass, should_defer = self.get_declared_metaclass(defn.name, defn.metaclass) + declared_metaclass, should_defer, any_meta = self.get_declared_metaclass( + defn.name, defn.metaclass + ) if should_defer or self.found_incomplete_ref(tag): # Metaclass was not ready. Defer current target. 
self.mark_incomplete(defn.name, defn) @@ -1599,6 +1601,8 @@ def analyze_class(self, defn: ClassDef) -> None: self.setup_type_vars(defn, tvar_defs) if base_error: defn.info.fallback_to_any = True + if any_meta: + defn.info.meta_fallback_to_any = True with self.scope.class_scope(defn.info): self.configure_base_classes(defn, base_types) @@ -2247,8 +2251,17 @@ def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool: def get_declared_metaclass( self, name: str, metaclass_expr: Expression | None - ) -> tuple[Instance | None, bool]: - """Returns either metaclass instance or boolean whether we should defer.""" + ) -> tuple[Instance | None, bool, bool]: + """Get declared metaclass from metaclass expression. + + Returns a tuple of three values: + * A metaclass instance or None + * A boolean indicating whether we should defer + * A boolean indicating whether we should set metaclass Any fallback + (either for Any metaclass or invalid/dynamic metaclass). + + The two boolean flags can only be True if instance is None. + """ declared_metaclass = None if metaclass_expr: metaclass_name = None @@ -2258,25 +2271,20 @@ def get_declared_metaclass( metaclass_name = get_member_expr_fullname(metaclass_expr) if metaclass_name is None: self.fail(f'Dynamic metaclass not supported for "{name}"', metaclass_expr) - return None, False + return None, False, True sym = self.lookup_qualified(metaclass_name, metaclass_expr) if sym is None: # Probably a name error - it is already handled elsewhere - return None, False + return None, False, True if isinstance(sym.node, Var) and isinstance(get_proper_type(sym.node.type), AnyType): - # Create a fake TypeInfo that fallbacks to `Any`, basically allowing - # all the attributes. Same thing as we do for `Any` base class. - any_info = self.make_empty_type_info(ClassDef(sym.node.name, Block([]))) - any_info.fallback_to_any = True - any_info._fullname = sym.node.fullname if self.options.disallow_subclassing_any: self.fail( - f'Class cannot use "{any_info.fullname}" as a metaclass (has type "Any")', + f'Class cannot use "{sym.node.name}" as a metaclass (has type "Any")', metaclass_expr, ) - return Instance(any_info, []), False + return None, False, True if isinstance(sym.node, PlaceholderNode): - return None, True # defer later in the caller + return None, True, False # defer later in the caller # Support type aliases, like `_Meta: TypeAlias = type` if ( @@ -2291,16 +2299,16 @@ def get_declared_metaclass( if not isinstance(metaclass_info, TypeInfo) or metaclass_info.tuple_type is not None: self.fail(f'Invalid metaclass "{metaclass_name}"', metaclass_expr) - return None, False + return None, False, False if not metaclass_info.is_metaclass(): self.fail( 'Metaclasses not inheriting from "type" are not supported', metaclass_expr ) - return None, False + return None, False, False inst = fill_typevars(metaclass_info) assert isinstance(inst, Instance) declared_metaclass = inst - return declared_metaclass, False + return declared_metaclass, False, False def recalculate_metaclass(self, defn: ClassDef, declared_metaclass: Instance | None) -> None: defn.info.declared_metaclass = declared_metaclass diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 97f811384d37..012d395e632f 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -255,6 +255,7 @@ def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> node.is_enum, node.is_protocol, node.fallback_to_any, + node.meta_fallback_to_any, node.is_named_tuple, node.is_newtype, # We need this to e.g. 
trigger metaclass calculation in subclasses. diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 0214e7ae308a..4bf3672af740 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1167,7 +1167,7 @@ def find_member( if isinstance(getattr_type, CallableType): return getattr_type.ret_type return getattr_type - if itype.type.fallback_to_any: + if itype.type.fallback_to_any or class_obj and itype.type.meta_fallback_to_any: return AnyType(TypeOfAny.special_form) if isinstance(v, TypeInfo): # PEP 544 doesn't specify anything about such use cases. So we just try diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 9a38d8f344f7..f1af13923fd7 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -4439,7 +4439,7 @@ def f(TB: Type[B]): reveal_type(TB.x) # N: Revealed type is "builtins.int" [case testMetaclassAsAny] -from typing import Any, ClassVar +from typing import Any, ClassVar, Type MyAny: Any class WithMeta(metaclass=MyAny): @@ -4451,13 +4451,15 @@ reveal_type(WithMeta.x) # N: Revealed type is "builtins.int" reveal_type(WithMeta().x) # N: Revealed type is "builtins.int" WithMeta().m # E: "WithMeta" has no attribute "m" WithMeta().a # E: "WithMeta" has no attribute "a" +t: Type[WithMeta] +t.unknown # OK [case testMetaclassAsAnyWithAFlag] # flags: --disallow-subclassing-any -from typing import Any, ClassVar +from typing import Any, ClassVar, Type MyAny: Any -class WithMeta(metaclass=MyAny): # E: Class cannot use "__main__.MyAny" as a metaclass (has type "Any") +class WithMeta(metaclass=MyAny): # E: Class cannot use "MyAny" as a metaclass (has type "Any") x: ClassVar[int] reveal_type(WithMeta.a) # N: Revealed type is "Any" @@ -4466,6 +4468,8 @@ reveal_type(WithMeta.x) # N: Revealed type is "builtins.int" reveal_type(WithMeta().x) # N: Revealed type is "builtins.int" WithMeta().m # E: "WithMeta" has no attribute "m" WithMeta().a # E: "WithMeta" has no attribute "a" +t: Type[WithMeta] +t.unknown # OK [case testMetaclassIterable] from typing import Iterable, Iterator diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 60917db041a1..1aff1ba2862f 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6348,3 +6348,14 @@ class C(B): self.x = self.foo() [out] [out2] + +[case testNoCrashIncrementalMetaAny] +import a +[file a.py] +from m import Foo +[file a.py.2] +from m import Foo +# touch +[file m.py] +from missing_module import Meta # type: ignore[import] +class Foo(metaclass=Meta): ... diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index d4b2d3469871..ed33776af438 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -3124,7 +3124,6 @@ whatever: int [out] == b.py:2: error: Name "c.M" is not defined -a.py:3: error: "Type[B]" has no attribute "x" [case testFixMissingMetaclass] import a @@ -3143,7 +3142,6 @@ class M(type): x: int [out] b.py:2: error: Name "c.M" is not defined -a.py:3: error: "Type[B]" has no attribute "x" == [case testGoodMetaclassSpoiled] From a08388cf6de053f659a9d663387f8f71e68664d8 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 22 Jan 2023 11:22:06 +0000 Subject: [PATCH 196/292] Fix crash in astdiff and clean it up (#14497) Ref #14329 This fixes one of the crashes reported in the issue. 
In fact, using recursive type caught this crash statically, plus another subtle crash in `snapshot_optional_type()`, _without a single false positive_ (I was able to cleanly type also symbol table snapshots, but decided it is not worth the churn since we only ever compare them with `==`, supported by ~every Python object). I feel triumphant :-) --- mypy/server/astdiff.py | 30 ++++++++++++++++++------------ mypy/server/update.py | 10 +++++++--- test-data/unit/fine-grained.test | 28 ++++++++++++++++++++++++++++ 3 files changed, 53 insertions(+), 15 deletions(-) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 012d395e632f..40b60f1a69d8 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -52,7 +52,7 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' from __future__ import annotations -from typing import Sequence, Tuple, cast +from typing import Sequence, Tuple, Union, cast from typing_extensions import TypeAlias as _TypeAlias from mypy.expandtype import expand_type @@ -109,11 +109,17 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' # snapshots are immutable). # # For example, the snapshot of the 'int' type is ('Instance', 'builtins.int', ()). -SnapshotItem: _TypeAlias = Tuple[object, ...] + +# Type snapshots are strict, they must be hashable and ordered (e.g. for Unions). +Primitive: _TypeAlias = Union[str, float, int, bool] # float is for Literal[3.14] support. +SnapshotItem: _TypeAlias = Tuple[Union[Primitive, "SnapshotItem"], ...] + +# Symbol snapshots can be more lenient. +SymbolSnapshot: _TypeAlias = Tuple[object, ...] def compare_symbol_table_snapshots( - name_prefix: str, snapshot1: dict[str, SnapshotItem], snapshot2: dict[str, SnapshotItem] + name_prefix: str, snapshot1: dict[str, SymbolSnapshot], snapshot2: dict[str, SymbolSnapshot] ) -> set[str]: """Return names that are different in two snapshots of a symbol table. @@ -155,7 +161,7 @@ def compare_symbol_table_snapshots( return triggers -def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, SnapshotItem]: +def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, SymbolSnapshot]: """Create a snapshot description that represents the state of a symbol table. The snapshot has a representation based on nested tuples and dicts @@ -165,7 +171,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna things defined in other modules are represented just by the names of the targets. """ - result: dict[str, SnapshotItem] = {} + result: dict[str, SymbolSnapshot] = {} for name, symbol in table.items(): node = symbol.node # TODO: cross_ref? @@ -206,7 +212,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna return result -def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> tuple[object, ...]: +def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> SymbolSnapshot: """Create a snapshot description of a symbol table node. The representation is nested tuples and dicts. 
Only externally @@ -290,11 +296,11 @@ def snapshot_type(typ: Type) -> SnapshotItem: return typ.accept(SnapshotTypeVisitor()) -def snapshot_optional_type(typ: Type | None) -> SnapshotItem | None: +def snapshot_optional_type(typ: Type | None) -> SnapshotItem: if typ: return snapshot_type(typ) else: - return None + return ("",) def snapshot_types(types: Sequence[Type]) -> SnapshotItem: @@ -396,7 +402,7 @@ def visit_parameters(self, typ: Parameters) -> SnapshotItem: "Parameters", snapshot_types(typ.arg_types), tuple(encode_optional_str(name) for name in typ.arg_names), - tuple(typ.arg_kinds), + tuple(k.value for k in typ.arg_kinds), ) def visit_callable_type(self, typ: CallableType) -> SnapshotItem: @@ -407,7 +413,7 @@ def visit_callable_type(self, typ: CallableType) -> SnapshotItem: snapshot_types(typ.arg_types), snapshot_type(typ.ret_type), tuple(encode_optional_str(name) for name in typ.arg_names), - tuple(typ.arg_kinds), + tuple(k.value for k in typ.arg_kinds), typ.is_type_obj(), typ.is_ellipsis_args, snapshot_types(typ.variables), @@ -464,7 +470,7 @@ def visit_type_alias_type(self, typ: TypeAliasType) -> SnapshotItem: return ("TypeAliasType", typ.alias.fullname, snapshot_types(typ.args)) -def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> tuple[object, ...]: +def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> SymbolSnapshot: """Create a snapshot of the signature of a function that has no explicit signature. If the arguments to a function without signature change, it must be @@ -476,7 +482,7 @@ def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> tuple[obje if isinstance(func, FuncItem): return (tuple(func.arg_names), tuple(func.arg_kinds)) else: - result = [] + result: list[SymbolSnapshot] = [] for item in func.items: if isinstance(item, Decorator): if item.var.type: diff --git a/mypy/server/update.py b/mypy/server/update.py index 83cce22873a1..00b823c99dfd 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -151,7 +151,11 @@ semantic_analysis_for_scc, semantic_analysis_for_targets, ) -from mypy.server.astdiff import SnapshotItem, compare_symbol_table_snapshots, snapshot_symbol_table +from mypy.server.astdiff import ( + SymbolSnapshot, + compare_symbol_table_snapshots, + snapshot_symbol_table, +) from mypy.server.astmerge import merge_asts from mypy.server.aststrip import SavedAttributes, strip_target from mypy.server.deps import get_dependencies_of_target, merge_dependencies @@ -417,7 +421,7 @@ def update_module( t0 = time.time() # Record symbol table snapshot of old version the changed module. - old_snapshots: dict[str, dict[str, SnapshotItem]] = {} + old_snapshots: dict[str, dict[str, SymbolSnapshot]] = {} if module in manager.modules: snapshot = snapshot_symbol_table(module, manager.modules[module].names) old_snapshots[module] = snapshot @@ -751,7 +755,7 @@ def get_sources( def calculate_active_triggers( manager: BuildManager, - old_snapshots: dict[str, dict[str, SnapshotItem]], + old_snapshots: dict[str, dict[str, SymbolSnapshot]], new_modules: dict[str, MypyFile | None], ) -> set[str]: """Determine activated triggers by comparing old and new symbol tables. diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index ed33776af438..d47c21283c91 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -10313,3 +10313,31 @@ a.py:3: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#varia a.py:4: note: Revealed type is "A?" 
== a.py:4: note: Revealed type is "Union[builtins.str, builtins.int]" + +[case testUnionOfSimilarCallablesCrash] +import b + +[file b.py] +from a import x + +[file m.py] +from typing import Union, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +def foo(x: T, y: S) -> Union[T, S]: ... +def f(x: int) -> int: ... +def g(*x: int) -> int: ... + +[file a.py] +from m import f, g, foo +x = foo(f, g) + +[file a.py.2] +from m import f, g, foo +x = foo(f, g) +reveal_type(x) +[builtins fixtures/tuple.pyi] +[out] +== +a.py:3: note: Revealed type is "Union[def (x: builtins.int) -> builtins.int, def (*x: builtins.int) -> builtins.int]" From cb14d6f0cf8e434928557d0f0c73c8a9c7e18c52 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 22 Jan 2023 05:36:59 -0800 Subject: [PATCH 197/292] stubgen: treat dlls as c modules (#14503) Fixes #14028 --- mypy/moduleinspect.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py index feca1f43abf2..b383fc9dc145 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -36,7 +36,7 @@ def is_c_module(module: ModuleType) -> bool: # Could be a namespace package. These must be handled through # introspection, since there is no source file. return True - return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd"] + return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd", ".dll"] class InspectError(Exception): From d8418599f3a7d9af9b40a15b6a5c5d73fe51ac85 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 23 Jan 2023 10:25:34 +0000 Subject: [PATCH 198/292] Properly support union of TypedDicts as dict literal context (#14505) Fixes #14481 (regression) Fixes #13274 Fixes #8533 Most notably, if literal matches multiple items in union, it is not an error, it is only an error if it matches none of them, so I adjust the error message accordingly. An import caveat is that an unrelated error like `{"key": 42 + "no"}` can cause no item to match (an hence an extra error), but I think it is fine, since we still show the actual error, and avoiding this would require some dirty hacks. Also note there was an (obvious) bug in one of the fixtures, that caused one of repros not repro in tests, fixing it required tweaking an unrelated test. --- mypy/checkexpr.py | 56 ++++++++++------- mypy/messages.py | 4 +- test-data/unit/check-typeddict.test | 96 +++++++++++++++++++++++++++-- test-data/unit/check-unions.test | 6 +- test-data/unit/fixtures/dict.pyi | 2 +- 5 files changed, 131 insertions(+), 33 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 1c25b8ea7a12..c2cf226ef210 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4188,6 +4188,17 @@ def fast_dict_type(self, e: DictExpr) -> Type | None: self.resolved_type[e] = dt return dt + def check_typeddict_literal_in_context( + self, e: DictExpr, typeddict_context: TypedDictType + ) -> Type: + orig_ret_type = self.check_typeddict_call_with_dict( + callee=typeddict_context, kwargs=e, context=e, orig_callee=None + ) + ret_type = get_proper_type(orig_ret_type) + if isinstance(ret_type, TypedDictType): + return ret_type.copy_modified() + return typeddict_context.copy_modified() + def visit_dict_expr(self, e: DictExpr) -> Type: """Type check a dict expression. 
@@ -4197,15 +4208,20 @@ def visit_dict_expr(self, e: DictExpr) -> Type: # an error, but returns the TypedDict type that matches the literal it found # that would cause a second error when that TypedDict type is returned upstream # to avoid the second error, we always return TypedDict type that was requested - typeddict_context = self.find_typeddict_context(self.type_context[-1], e) - if typeddict_context: - orig_ret_type = self.check_typeddict_call_with_dict( - callee=typeddict_context, kwargs=e, context=e, orig_callee=None - ) - ret_type = get_proper_type(orig_ret_type) - if isinstance(ret_type, TypedDictType): - return ret_type.copy_modified() - return typeddict_context.copy_modified() + typeddict_contexts = self.find_typeddict_context(self.type_context[-1], e) + if typeddict_contexts: + if len(typeddict_contexts) == 1: + return self.check_typeddict_literal_in_context(e, typeddict_contexts[0]) + # Multiple items union, check if at least one of them matches cleanly. + for typeddict_context in typeddict_contexts: + with self.msg.filter_errors() as err, self.chk.local_type_map() as tmap: + ret_type = self.check_typeddict_literal_in_context(e, typeddict_context) + if err.has_new_errors(): + continue + self.chk.store_types(tmap) + return ret_type + # No item matched without an error, so we can't unambiguously choose the item. + self.msg.typeddict_context_ambiguous(typeddict_contexts, e) # fast path attempt dt = self.fast_dict_type(e) @@ -4271,26 +4287,20 @@ def visit_dict_expr(self, e: DictExpr) -> Type: def find_typeddict_context( self, context: Type | None, dict_expr: DictExpr - ) -> TypedDictType | None: + ) -> list[TypedDictType]: context = get_proper_type(context) if isinstance(context, TypedDictType): - return context + return [context] elif isinstance(context, UnionType): items = [] for item in context.items: - item_context = self.find_typeddict_context(item, dict_expr) - if item_context is not None and self.match_typeddict_call_with_dict( - item_context, dict_expr, dict_expr - ): - items.append(item_context) - if len(items) == 1: - # Only one union item is valid TypedDict for the given dict_expr, so use the - # context as it's unambiguous. - return items[0] - if len(items) > 1: - self.msg.typeddict_context_ambiguous(items, dict_expr) + item_contexts = self.find_typeddict_context(item, dict_expr) + for item_context in item_contexts: + if self.match_typeddict_call_with_dict(item_context, dict_expr, dict_expr): + items.append(item_context) + return items # No TypedDict type in context. 
- return None + return [] def visit_lambda_expr(self, e: LambdaExpr) -> Type: """Type check lambda expression.""" diff --git a/mypy/messages.py b/mypy/messages.py index 5d8bf79ec8a3..94a97f696b6c 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1705,7 +1705,9 @@ def typeddict_key_not_found( def typeddict_context_ambiguous(self, types: list[TypedDictType], context: Context) -> None: formatted_types = ", ".join(list(format_type_distinctly(*types))) - self.fail(f"Type of TypedDict is ambiguous, could be any of ({formatted_types})", context) + self.fail( + f"Type of TypedDict is ambiguous, none of ({formatted_types}) matches cleanly", context + ) def typeddict_key_cannot_be_deleted( self, typ: TypedDictType, item_name: str, context: Context diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index e426b8a7630b..70ff6a4a6759 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -895,15 +895,25 @@ c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} reveal_type(c) # N: Revealed type is "Union[TypedDict('__main__.A', {'@type': Literal['a-type'], 'a': builtins.str}), TypedDict('__main__.B', {'@type': Literal['b-type'], 'b': builtins.int})]" [builtins fixtures/dict.pyi] -[case testTypedDictUnionAmbiguousCase] +[case testTypedDictUnionAmbiguousCaseBothMatch] from typing import Union, Mapping, Any, cast from typing_extensions import TypedDict, Literal -A = TypedDict('A', {'@type': Literal['a-type'], 'a': str}) -B = TypedDict('B', {'@type': Literal['a-type'], 'a': str}) +A = TypedDict('A', {'@type': Literal['a-type'], 'value': str}) +B = TypedDict('B', {'@type': Literal['b-type'], 'value': str}) + +c: Union[A, B] = {'@type': 'a-type', 'value': 'Test'} +[builtins fixtures/dict.pyi] + +[case testTypedDictUnionAmbiguousCaseNoMatch] +from typing import Union, Mapping, Any, cast +from typing_extensions import TypedDict, Literal -c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} # E: Type of TypedDict is ambiguous, could be any of ("A", "B") \ - # E: Incompatible types in assignment (expression has type "Dict[str, str]", variable has type "Union[A, B]") +A = TypedDict('A', {'@type': Literal['a-type'], 'value': int}) +B = TypedDict('B', {'@type': Literal['b-type'], 'value': int}) + +c: Union[A, B] = {'@type': 'a-type', 'value': 'Test'} # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Incompatible types in assignment (expression has type "Dict[str, str]", variable has type "Union[A, B]") [builtins fixtures/dict.pyi] -- Use dict literals @@ -2786,3 +2796,79 @@ TDC = TypedDict("TDC", {"val": int, "next": Optional[Self]}) # E: Self type can [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsInferred] +from typing import TypedDict, Dict + +D = TypedDict("D", {"foo": int}, total=False) + +def f(d: Dict[str, D]) -> None: + args = d["a"] + args.update(d.get("b", {})) # OK +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsDeclared] +from typing import TypedDict, Union + +class A(TypedDict, total=False): + name: str +class B(TypedDict, total=False): + name: str + +def foo(data: Union[A, B]) -> None: ... 
+foo({"name": "Robert"}) # OK +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsEmpty] +from typing import TypedDict, Union + +class Foo(TypedDict, total=False): + foo: str +class Bar(TypedDict, total=False): + bar: str + +def foo(body: Union[Foo, Bar] = {}) -> None: # OK + ... +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsDistinct] +from typing import TypedDict, Union, Literal + +class A(TypedDict): + type: Literal['a'] + value: bool +class B(TypedDict): + type: Literal['b'] + value: str + +Response = Union[A, B] +def method(message: Response) -> None: ... + +method({'type': 'a', 'value': True}) # OK +method({'type': 'b', 'value': 'abc'}) # OK +method({'type': 'a', 'value': 'abc'}) # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Argument 1 to "method" has incompatible type "Dict[str, str]"; expected "Union[A, B]" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsNested] +from typing import TypedDict, Union + +class A(TypedDict, total=False): + foo: C +class B(TypedDict, total=False): + foo: D +class C(TypedDict, total=False): + c: str +class D(TypedDict, total=False): + d: str + +def foo(data: Union[A, B]) -> None: ... +foo({"foo": {"c": "foo"}}) # OK +foo({"foo": {"e": "foo"}}) # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Argument 1 to "foo" has incompatible type "Dict[str, Dict[str, str]]"; expected "Union[A, B]" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index 4c4fbc32ec3f..cabc28e786b2 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -971,14 +971,14 @@ if x: [builtins fixtures/dict.pyi] [out] -[case testUnpackUnionNoCrashOnPartialNoneList] +[case testUnpackUnionNoCrashOnPartialList] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] -x, _ = d.get(a, ([], [])) -reveal_type(x) # N: Revealed type is "Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.list[]]" +x, _ = d.get(a, ([], "")) +reveal_type(x) # N: Revealed type is "builtins.list[Tuple[builtins.str, builtins.str]]" for y in x: pass [builtins fixtures/dict.pyi] diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index f4ec15e4fa9a..856b8b7266c1 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -29,7 +29,7 @@ class dict(Mapping[KT, VT]): @overload def get(self, k: KT) -> Optional[VT]: pass @overload - def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass + def get(self, k: KT, default: Union[VT, T]) -> Union[VT, T]: pass def __len__(self) -> int: ... class int: # for convenience From 9bbb93cf69f81ea1fc33f223ff03cf2a1b604bc5 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Jan 2023 10:30:54 +0000 Subject: [PATCH 199/292] Typeshed cherry-pick: Add __eq__ to types.MappingProxyType (#9580) (#14507) This fixes a false positive when using `--strict-equality`. See https://github.com/python/typeshed/commit/d5b88c552cd7530b598b6369fef66bda745de93d for context. 
--- mypy/typeshed/stdlib/types.pyi | 1 + 1 file changed, 1 insertion(+) diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 6928032f92b1..e3e6418347b1 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -310,6 +310,7 @@ class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): def __getitem__(self, __key: _KT) -> _VT_co: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... def copy(self) -> dict[_KT, _VT_co]: ... def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... From dcf910e05fdcff00b1fb9a8b9ca85f9a6667b946 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 23 Jan 2023 11:45:03 +0000 Subject: [PATCH 200/292] Fix crash in daemon mode on new import cycle (#14508) Fixes #14329 This fixes the second crash reported in the issue (other one is already fixed). This one is tricky, it looks like it happens only when we bring in a new import cycle in an incremental update with `--follow-import=normal`. To explain the reason, a little reminder of how semantic analyzer passes work: * Originally, we recorded progress automatically when some new symbol was resolved and added to symbol tables. * With implementation of recursive types, this mechanism was insufficient, as recursive types require modifying some symbols _in place_, this is why `force_progress` flag was added to `defer()`. * I was only careful with this flag for recursive type aliases (that were implemented first), for other things (like recursive TypedDicts, etc) I just always passed `force_progress=True` (without checking if we actually resolved some placeholder types). * The reasoning for that is if we ever add `becomes_typeinfo=True`, there is no way this symbol will later be unresolved (otherwise how would we know this is something that is a type). * It turns out this reasoning doesn't work in some edge cases in daemon mode, we do put some placeholders with `becomes_typeinfo=True` for symbols imported from modules that were not yet processed, thus causing a crash (see test cases). * There were two options to fix this: one is to stop creating placeholders with `becomes_typeinfo=True` for unimported symbols in daemon mode, other one is to always carefully check if in-place update of a symbol actually resulted in progress. * Ultimately I decided that the first way is too fragile (and I don't understand how import following works for daemon anyway), and the second way is something that is technically correct anyway, so here is this PR I didn't add test cases for each of the crash scenarios, since they are all very similar. I only added two that I encountered "in the wild", upper bound and tuple base caused actual crash in `trio` stubs, plus also randomly a test for a TypedDict crash. _EDIT:_ and one more thing, the "cannot resolve name" error should never appear in normal mode, only in daemon update (see reasoning above), so I don't make those error messages detailed, just add some minimal info if we will need to debug them. 
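For reference, a condensed two-file sketch of the code that triggered the crash, adapted from the new `testNewImportCycleTypeVarBound` test case added below; the crash required daemon mode with `--follow-imports=normal` and a later run that pulls this package in as a *new* import cycle:

```python
# trio/__init__.py  (only imported by the second daemon run)
from typing import TypeVar
import trio
from . import abc as abc

# The bound still refers to a placeholder when this is first analyzed.
T = TypeVar("T", bound=trio.abc.A)

# trio/abc.py
import trio
class A: ...
```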
--- mypy/semanal.py | 28 +++++-- mypy/semanal_namedtuple.py | 4 +- mypy/semanal_newtype.py | 10 ++- mypy/semanal_shared.py | 6 ++ mypy/semanal_typeddict.py | 4 +- mypy/types.py | 8 ++ .../unit/fine-grained-follow-imports.test | 77 +++++++++++++++++++ 7 files changed, 126 insertions(+), 11 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 5653aa4547c4..34cb45194d19 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1049,7 +1049,12 @@ def setup_self_type(self) -> None: if info.self_type is not None: if has_placeholder(info.self_type.upper_bound): # Similar to regular (user defined) type variables. - self.defer(force_progress=True) + self.process_placeholder( + None, + "Self upper bound", + info, + force_progress=info.self_type.upper_bound != fill_typevars(info), + ) else: return info.self_type = TypeVarType("Self", f"{info.fullname}.Self", 0, [], fill_typevars(info)) @@ -2132,7 +2137,9 @@ def configure_tuple_base_class(self, defn: ClassDef, base: TupleType) -> Instanc self.fail("Class has two incompatible bases derived from tuple", defn) defn.has_incompatible_baseclass = True if info.special_alias and has_placeholder(info.special_alias.target): - self.defer(force_progress=True) + self.process_placeholder( + None, "tuple base", defn, force_progress=base != info.tuple_type + ) info.update_tuple_type(base) self.setup_alias_type_vars(defn) @@ -3913,12 +3920,16 @@ def process_typevar_declaration(self, s: AssignmentStmt) -> bool: type_var = TypeVarExpr(name, self.qualified_name(name), values, upper_bound, variance) type_var.line = call.line call.analyzed = type_var + updated = True else: assert isinstance(call.analyzed, TypeVarExpr) + updated = values != call.analyzed.values or upper_bound != call.analyzed.upper_bound call.analyzed.upper_bound = upper_bound call.analyzed.values = values if any(has_placeholder(v) for v in values) or has_placeholder(upper_bound): - self.defer(force_progress=True) + self.process_placeholder( + None, f"TypeVar {'values' if values else 'upper bound'}", s, force_progress=updated + ) self.add_symbol(name, call.analyzed, s) return True @@ -5931,7 +5942,9 @@ def is_incomplete_namespace(self, fullname: str) -> bool: """ return fullname in self.incomplete_namespaces - def process_placeholder(self, name: str, kind: str, ctx: Context) -> None: + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: """Process a reference targeting placeholder node. 
If this is not a final iteration, defer current node, @@ -5943,10 +5956,11 @@ def process_placeholder(self, name: str, kind: str, ctx: Context) -> None: if self.final_iteration: self.cannot_resolve_name(name, kind, ctx) else: - self.defer(ctx) + self.defer(ctx, force_progress=force_progress) - def cannot_resolve_name(self, name: str, kind: str, ctx: Context) -> None: - self.fail(f'Cannot resolve {kind} "{name}" (possible cyclic definition)', ctx) + def cannot_resolve_name(self, name: str | None, kind: str, ctx: Context) -> None: + name_format = f' "{name}"' if name else "" + self.fail(f"Cannot resolve {kind}{name_format} (possible cyclic definition)", ctx) if not self.options.disable_recursive_aliases and self.is_func_scope(): self.note("Recursive types are not allowed at function scope", ctx) diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index ec5f13d0fce0..226c2e50326b 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -501,7 +501,9 @@ def build_namedtuple_typeinfo( info.is_named_tuple = True tuple_base = TupleType(types, fallback) if info.special_alias and has_placeholder(info.special_alias.target): - self.api.defer(force_progress=True) + self.api.process_placeholder( + None, "NamedTuple item", info, force_progress=tuple_base != info.tuple_type + ) info.update_tuple_type(tuple_base) info.line = line # For use by mypyc. diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py index b6fb64532e6e..cb1055a62186 100644 --- a/mypy/semanal_newtype.py +++ b/mypy/semanal_newtype.py @@ -249,10 +249,16 @@ def build_newtype_typeinfo( init_func = FuncDef("__init__", args, Block([]), typ=signature) init_func.info = info init_func._fullname = info.fullname + ".__init__" + if not existing_info: + updated = True + else: + previous_sym = info.names["__init__"].node + assert isinstance(previous_sym, FuncDef) + updated = old_type != previous_sym.arguments[1].variable.type info.names["__init__"] = SymbolTableNode(MDEF, init_func) - if has_placeholder(old_type) or info.tuple_type and has_placeholder(info.tuple_type): - self.api.defer(force_progress=True) + if has_placeholder(old_type): + self.api.process_placeholder(None, "NewType base", info, force_progress=updated) return info # Helpers diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index e5be4aa55cd3..f4bc173b52d5 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -232,6 +232,12 @@ def qualified_name(self, n: str) -> str: def is_typeshed_stub_file(self) -> bool: raise NotImplementedError + @abstractmethod + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: + raise NotImplementedError + def set_callable_name(sig: Type, fdef: FuncDef) -> ProperType: sig = get_proper_type(sig) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index cd3d02bc6bb8..55618318c1e8 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -535,7 +535,9 @@ def build_typeddict_typeinfo( info = existing_info or self.api.basic_new_typeinfo(name, fallback, line) typeddict_type = TypedDictType(dict(zip(items, types)), required_keys, fallback) if info.special_alias and has_placeholder(info.special_alias.target): - self.api.defer(force_progress=True) + self.api.process_placeholder( + None, "TypedDict item", info, force_progress=typeddict_type != info.typeddict_type + ) info.update_typeddict_type(typeddict_type) return info diff --git a/mypy/types.py b/mypy/types.py index 7af83b6c11d3..bf610a01b63b 100644 
--- a/mypy/types.py +++ b/mypy/types.py @@ -2857,6 +2857,14 @@ def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return cast(T, visitor.visit_placeholder_type(self)) + def __hash__(self) -> int: + return hash((self.fullname, tuple(self.args))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PlaceholderType): + return NotImplemented + return self.fullname == other.fullname and self.args == other.args + def serialize(self) -> str: # We should never get here since all placeholders should be replaced # during semantic analysis. diff --git a/test-data/unit/fine-grained-follow-imports.test b/test-data/unit/fine-grained-follow-imports.test index ebe8b86b37ab..22f2a7895cf9 100644 --- a/test-data/unit/fine-grained-follow-imports.test +++ b/test-data/unit/fine-grained-follow-imports.test @@ -769,3 +769,80 @@ from . import mod3 == main.py:1: error: Cannot find implementation or library stub for module named "pkg" main.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports + +[case testNewImportCycleTypeVarBound] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar +import trio +from . import abc as abc + +T = TypeVar("T", bound=trio.abc.A) + +[file trio/abc.py.2] +import trio +class A: ... +[out] +== + +[case testNewImportCycleTupleBase] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar, Tuple +import trio +from . import abc as abc + +class C(Tuple[trio.abc.A, trio.abc.A]): ... + +[file trio/abc.py.2] +import trio +class A: ... +[builtins fixtures/tuple.pyi] +[out] +== + +[case testNewImportCycleTypedDict] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar +from typing_extensions import TypedDict +import trio +from . import abc as abc + +class C(TypedDict): + x: trio.abc.A + y: trio.abc.A + +[file trio/abc.py.2] +import trio +class A: ... +[builtins fixtures/dict.pyi] +[out] +== From 77f872568998bf54a24f497fef72893a60d84633 Mon Sep 17 00:00:00 2001 From: Alessio Izzo Date: Mon, 23 Jan 2023 14:37:52 +0100 Subject: [PATCH 201/292] Fix missing self check dependency on 3.11 (#14492) Fixes #14487 --- test-requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/test-requirements.txt b/test-requirements.txt index ac965f4abc52..aec11e87e96f 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -17,3 +17,4 @@ pytest-cov>=2.10.0 py>=1.5.2 setuptools>=65.5.1 six +tomli>=1.1.0 From db440ab063c3f01819a29d45e3e2288562f39891 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Jan 2023 14:34:19 +0000 Subject: [PATCH 202/292] Don't consider 'object' always truthy (#14510) There are two reasons I'm proposing this change. First, we know that many subclasses of 'object' can be falsy. Second, mypy sometimes simplifies `object | Any` into just `object`. The latter was considered always truthy, while the prior one wasn't. Now both of them are treated consistently. An alternative fix would be to not simplify unions like `object | Any`, but this seems a bit ad hoc. This only has an effect when the `truthy-bool` error code is explicitly enabled. Fixes #14480. 
This doesn't just fix the regression but fixes a more general issue. --- mypy/checker.py | 1 + test-data/unit/check-errorcodes.test | 26 ++++++++++++++++++++++---- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 61104756b297..46200f5813cc 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -5151,6 +5151,7 @@ def _is_truthy_type(self, t: ProperType) -> bool: and bool(t.type) and not t.type.has_readable_member("__bool__") and not t.type.has_readable_member("__len__") + and t.type.fullname != "builtins.object" ) or isinstance(t, FunctionLike) or ( diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index d966eb44b6e3..19ce56057ff5 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -803,12 +803,15 @@ from typing_extensions import TypedDict Foo = TypedDict("Bar", {}) # E: First argument "Bar" to TypedDict() does not match variable name "Foo" [name-match] [builtins fixtures/dict.pyi] + [case testTruthyBool] # flags: --enable-error-code truthy-bool -from typing import List, Union +from typing import List, Union, Any class Foo: pass +class Bar: + pass foo = Foo() if foo: # E: "__main__.foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] @@ -836,15 +839,30 @@ if good_union: if not good_union: pass -bad_union: Union[Foo, object] = Foo() -if bad_union: # E: "__main__.bad_union" has type "Union[Foo, object]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] +bad_union: Union[Foo, Bar] = Foo() +if bad_union: # E: "__main__.bad_union" has type "Union[Foo, Bar]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + pass +if not bad_union: # E: "__main__.bad_union" has type "Union[Foo, Bar]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + pass + +# 'object' is special and is treated as potentially falsy +obj: object = Foo() +if obj: pass -if not bad_union: # E: "__main__.bad_union" has type "object" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] +if not obj: pass lst: List[int] = [] if lst: pass + +a: Any +if a: + pass + +any_or_object: Union[object, Any] +if any_or_object: + pass [builtins fixtures/list.pyi] [case testTruthyFunctions] From 4de3f5d771fd159b69010e547a664a52ae41ce79 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Jan 2023 18:01:18 +0000 Subject: [PATCH 203/292] [mypyc] Make explicit conversions i64(x) and i32(x) faster (#14504) These behave the same as `int(x)` and we want them to be no slower than the corresponding `int` conversions. Optimize them for bool, float, str and RInstance arguments. Work on mypyc/mypyc#837. 
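For illustration, a small example of the conversions that are now specialized, condensed from the new run tests below; when compiled, each call is lowered to the same specialized code as the corresponding `int(...)` conversion (running it uncompiled assumes a mypy_extensions version that ships `i64` at runtime, where it simply behaves like `int`):

```python
from mypy_extensions import i64

class C:
    def __int__(self) -> i64:
        return 5

def convert(b: bool, s: str, x: float, c: C) -> i64:
    # bool, str (with base), float and native-class arguments all get
    # specialized primitives instead of a generic constructor call.
    return i64(b) + i64(s, 2) + i64(x) + i64(c)

print(convert(True, "101", 3.9, C()))  # 1 + 5 + 3 + 5 == 14
```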
--- mypyc/irbuild/specialize.py | 12 +++- mypyc/primitives/int_ops.py | 66 +++++++++++---------- mypyc/test-data/irbuild-i32.test | 52 ++++++++++++++++ mypyc/test-data/irbuild-i64.test | 63 ++++++++++++++++++++ mypyc/test-data/run-i32.test | 16 +++++ mypyc/test-data/run-i64.test | 62 +++++++++++++++++++ test-data/unit/lib-stub/mypy_extensions.pyi | 9 ++- 7 files changed, 243 insertions(+), 37 deletions(-) diff --git a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py index e62350778f54..8cb24c5b47da 100644 --- a/mypyc/irbuild/specialize.py +++ b/mypyc/irbuild/specialize.py @@ -160,6 +160,8 @@ def translate_globals(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Va @specialize_function("builtins.int") @specialize_function("builtins.float") @specialize_function("builtins.complex") +@specialize_function("mypy_extensions.i64") +@specialize_function("mypy_extensions.i32") def translate_builtins_with_unary_dunder( builder: IRBuilder, expr: CallExpr, callee: RefExpr ) -> Value | None: @@ -167,7 +169,11 @@ def translate_builtins_with_unary_dunder( if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS] and isinstance(callee, NameExpr): arg = expr.args[0] arg_typ = builder.node_type(arg) - method = f"__{callee.name}__" + shortname = callee.fullname.split(".")[1] + if shortname in ("i64", "i32"): + method = "__int__" + else: + method = f"__{shortname}__" if isinstance(arg_typ, RInstance) and arg_typ.class_ir.has_method(method): obj = builder.accept(arg) return builder.gen_method_call(obj, method, [], None, expr.line) @@ -676,7 +682,7 @@ def translate_i64(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value elif is_int32_rprimitive(arg_type): val = builder.accept(arg) return builder.add(Extend(val, int64_rprimitive, signed=True, line=expr.line)) - elif is_int_rprimitive(arg_type): + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): val = builder.accept(arg) return builder.coerce(val, int64_rprimitive, expr.line) return None @@ -693,7 +699,7 @@ def translate_i32(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value elif is_int64_rprimitive(arg_type): val = builder.accept(arg) return builder.add(Truncate(val, int32_rprimitive, line=expr.line)) - elif is_int_rprimitive(arg_type): + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): val = builder.accept(arg) return builder.coerce(val, int32_rprimitive, expr.line) return None diff --git a/mypyc/primitives/int_ops.py b/mypyc/primitives/int_ops.py index 382bceb217f4..7eda9bab7e3c 100644 --- a/mypyc/primitives/int_ops.py +++ b/mypyc/primitives/int_ops.py @@ -35,39 +35,43 @@ unary_op, ) -# These int constructors produce object_rprimitives that then need to be unboxed -# I guess unboxing ourselves would save a check and branch though? - -# Get the type object for 'builtins.int'. -# For ordinary calls to int() we use a load_address to the type -load_address_op(name="builtins.int", type=object_rprimitive, src="PyLong_Type") - -# int(float). We could do a bit better directly. -function_op( - name="builtins.int", - arg_types=[float_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromFloat", - error_kind=ERR_MAGIC, -) +# Constructors for builtins.int and native int types have the same behavior. In +# interpreted mode, native int types are just aliases to 'int'. 
+for int_name in ("builtins.int", "mypy_extensions.i64", "mypy_extensions.i32"): + # These int constructors produce object_rprimitives that then need to be unboxed + # I guess unboxing ourselves would save a check and branch though? + + # Get the type object for 'builtins.int' or a native int type. + # For ordinary calls to int() we use a load_address to the type. + # Native ints don't have a separate type object -- we just use 'builtins.int'. + load_address_op(name=int_name, type=object_rprimitive, src="PyLong_Type") + + # int(float). We could do a bit better directly. + function_op( + name=int_name, + arg_types=[float_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromFloat", + error_kind=ERR_MAGIC, + ) -# int(string) -function_op( - name="builtins.int", - arg_types=[str_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromStr", - error_kind=ERR_MAGIC, -) + # int(string) + function_op( + name=int_name, + arg_types=[str_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStr", + error_kind=ERR_MAGIC, + ) -# int(string, base) -function_op( - name="builtins.int", - arg_types=[str_rprimitive, int_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromStrWithBase", - error_kind=ERR_MAGIC, -) + # int(string, base) + function_op( + name=int_name, + arg_types=[str_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStrWithBase", + error_kind=ERR_MAGIC, + ) # str(int) int_to_str_op = function_op( diff --git a/mypyc/test-data/irbuild-i32.test b/mypyc/test-data/irbuild-i32.test index 818c3138e4e3..7ea3c0864728 100644 --- a/mypyc/test-data/irbuild-i32.test +++ b/mypyc/test-data/irbuild-i32.test @@ -480,3 +480,55 @@ L0: y = 11 z = -3 return 1 + +[case testI32ExplicitConversionFromVariousTypes] +from mypy_extensions import i32 + +def bool_to_i32(b: bool) -> i32: + return i32(b) + +def str_to_i32(s: str) -> i32: + return i32(s) + +class C: + def __int__(self) -> i32: + return 5 + +def instance_to_i32(c: C) -> i32: + return i32(c) + +def float_to_i32(x: float) -> i32: + return i32(x) +[out] +def bool_to_i32(b): + b :: bool + r0 :: int32 +L0: + r0 = extend b: builtins.bool to int32 + return r0 +def str_to_i32(s): + s :: str + r0 :: object + r1 :: int32 +L0: + r0 = CPyLong_FromStr(s) + r1 = unbox(int32, r0) + return r1 +def C.__int__(self): + self :: __main__.C +L0: + return 5 +def instance_to_i32(c): + c :: __main__.C + r0 :: int32 +L0: + r0 = c.__int__() + return r0 +def float_to_i32(x): + x :: float + r0 :: object + r1 :: int32 +L0: + r0 = CPyLong_FromFloat(x) + r1 = unbox(int32, r0) + return r1 diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index 2c4052fa4796..47802d8e0c97 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1771,3 +1771,66 @@ L2: keep_alive x L3: return r3 + +[case testI64ExplicitConversionFromVariousTypes] +from mypy_extensions import i64 + +def bool_to_i64(b: bool) -> i64: + return i64(b) + +def str_to_i64(s: str) -> i64: + return i64(s) + +def str_to_i64_with_base(s: str) -> i64: + return i64(s, 2) + +class C: + def __int__(self) -> i64: + return 5 + +def instance_to_i64(c: C) -> i64: + return i64(c) + +def float_to_i64(x: float) -> i64: + return i64(x) +[out] +def bool_to_i64(b): + b :: bool + r0 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + return r0 +def str_to_i64(s): + s :: str + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromStr(s) + r1 = unbox(int64, r0) + 
return r1 +def str_to_i64_with_base(s): + s :: str + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromStrWithBase(s, 4) + r1 = unbox(int64, r0) + return r1 +def C.__int__(self): + self :: __main__.C +L0: + return 5 +def instance_to_i64(c): + c :: __main__.C + r0 :: int64 +L0: + r0 = c.__int__() + return r0 +def float_to_i64(x): + x :: float + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromFloat(x) + r1 = unbox(int64, r0) + return r1 diff --git a/mypyc/test-data/run-i32.test b/mypyc/test-data/run-i32.test index 3d2f3e59e83c..384e6bd4f02c 100644 --- a/mypyc/test-data/run-i32.test +++ b/mypyc/test-data/run-i32.test @@ -306,6 +306,22 @@ def test_i32_truncate_from_i64() -> None: x = i32(small2) assert x == 2**31 - 1 +def from_float(x: float) -> i32: + return i32(x) + +def test_explicit_conversion_from_float() -> None: + assert from_float(0.0) == 0 + assert from_float(1.456) == 1 + assert from_float(-1234.567) == -1234 + assert from_float(2**31 - 1) == 2**31 - 1 + assert from_float(-2**31) == -2**31 + # The error message could be better, but this is acceptable + with assertRaises(OverflowError, "int too large to convert to i32"): + assert from_float(float(2**31)) + with assertRaises(OverflowError, "int too large to convert to i32"): + # One ulp below the lowest valid i64 value + from_float(float(-2**31 - 2048)) + def test_tuple_i32() -> None: a: i32 = 1 b: i32 = 2 diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index c2a218156e66..0fc4b91330d4 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -310,6 +310,68 @@ def test_i64_from_large_small_literal() -> None: x = i64(-2**63) assert x == -2**63 +def from_float(x: float) -> i64: + return i64(x) + +def test_explicit_conversion_from_float() -> None: + assert from_float(0.0) == 0 + assert from_float(1.456) == 1 + assert from_float(-1234.567) == -1234 + assert from_float(2**63 - 1) == 2**63 - 1 + assert from_float(-2**63) == -2**63 + # The error message could be better, but this is acceptable + with assertRaises(OverflowError, "int too large to convert to i64"): + assert from_float(float(2**63)) + with assertRaises(OverflowError, "int too large to convert to i64"): + # One ulp below the lowest valid i64 value + from_float(float(-2**63 - 2048)) + +def from_str(s: str) -> i64: + return i64(s) + +def test_explicit_conversion_from_str() -> None: + assert from_str("0") == 0 + assert from_str("1") == 1 + assert from_str("-1234") == -1234 + with assertRaises(ValueError): + from_str("1.2") + +def from_str_with_base(s: str, base: int) -> i64: + return i64(s, base) + +def test_explicit_conversion_from_str_with_base() -> None: + assert from_str_with_base("101", 2) == 5 + assert from_str_with_base("109", 10) == 109 + assert from_str_with_base("-f0A", 16) == -3850 + assert from_str_with_base("0x1a", 16) == 26 + assert from_str_with_base("0X1A", 16) == 26 + with assertRaises(ValueError): + from_str_with_base("1.2", 16) + +def from_bool(b: bool) -> i64: + return i64(b) + +def test_explicit_conversion_from_bool() -> None: + assert from_bool(True) == 1 + assert from_bool(False) == 0 + +class IntConv: + def __init__(self, x: i64) -> None: + self.x = x + + def __int__(self) -> i64: + return self.x + 1 + +def test_explicit_conversion_from_instance() -> None: + assert i64(IntConv(0)) == 1 + assert i64(IntConv(12345)) == 12346 + assert i64(IntConv(-23)) == -22 + +def test_explicit_conversion_from_any() -> None: + # This can't be specialized + a: Any = "101" + assert i64(a, base=2) == 5 + def test_tuple_i64() -> 
None: a: i64 = 1 b: i64 = 2 diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi index 6274163c497d..d79be8719417 100644 --- a/test-data/unit/lib-stub/mypy_extensions.pyi +++ b/test-data/unit/lib-stub/mypy_extensions.pyi @@ -1,7 +1,7 @@ # NOTE: Requires fixtures/dict.pyi from typing import ( Any, Dict, Type, TypeVar, Optional, Any, Generic, Mapping, NoReturn as NoReturn, Iterator, - Union + Union, Protocol ) import sys @@ -51,10 +51,13 @@ mypyc_attr: Any class FlexibleAlias(Generic[_T, _U]): ... if sys.version_info >= (3, 0): + class __SupportsInt(Protocol[T_co]): + def __int__(self) -> int: pass + _Int = Union[int, i32, i64] class i32: - def __init__(self, x: _Int) -> None: ... + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... def __add__(self, x: i32) -> i32: ... def __radd__(self, x: i32) -> i32: ... def __sub__(self, x: i32) -> i32: ... @@ -84,7 +87,7 @@ if sys.version_info >= (3, 0): def __gt__(self, x: i32) -> bool: ... class i64: - def __init__(self, x: _Int) -> None: ... + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... def __add__(self, x: i64) -> i64: ... def __radd__(self, x: i64) -> i64: ... def __sub__(self, x: i64) -> i64: ... From 9ca303501ecbb2f235b75961d3711c6fce657c0f Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Jan 2023 19:19:08 +0000 Subject: [PATCH 204/292] Fix strict equality check if operand item type has custom __eq__ (#14513) Don't complain about comparing lists, variable-length tuples or sets if one of the operands has an item type with a custom `__eq__` method. Fix #14511. --- mypy/checkexpr.py | 38 +++++++++++++++------------ test-data/unit/check-expressions.test | 18 +++++++++++++ test-data/unit/fixtures/bool.pyi | 2 +- test-data/unit/fixtures/set.pyi | 1 + 4 files changed, 41 insertions(+), 18 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index c2cf226ef210..8dea7d0e8551 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2988,20 +2988,14 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: # testCustomEqCheckStrictEquality for an example. if not w.has_new_errors() and operator in ("==", "!="): right_type = self.accept(right) - # We suppress the error if there is a custom __eq__() method on either - # side. User defined (or even standard library) classes can define this - # to return True for comparisons between non-overlapping types. - if not custom_special_method( - left_type, "__eq__" - ) and not custom_special_method(right_type, "__eq__"): - # Also flag non-overlapping literals in situations like: - # x: Literal['a', 'b'] - # if x == 'c': - # ... - left_type = try_getting_literal(left_type) - right_type = try_getting_literal(right_type) - if self.dangerous_comparison(left_type, right_type): - self.msg.dangerous_comparison(left_type, right_type, "equality", e) + # Also flag non-overlapping literals in situations like: + # x: Literal['a', 'b'] + # if x == 'c': + # ... + left_type = try_getting_literal(left_type) + right_type = try_getting_literal(right_type) + if self.dangerous_comparison(left_type, right_type): + self.msg.dangerous_comparison(left_type, right_type, "equality", e) elif operator == "is" or operator == "is not": right_type = self.accept(right) # validate the right operand @@ -3064,6 +3058,12 @@ def dangerous_comparison( left, right = get_proper_types((left, right)) + # We suppress the error if there is a custom __eq__() method on either + # side. 
User defined (or even standard library) classes can define this + # to return True for comparisons between non-overlapping types. + if custom_special_method(left, "__eq__") or custom_special_method(right, "__eq__"): + return False + if self.chk.binder.is_unreachable_warning_suppressed(): # We are inside a function that contains type variables with value restrictions in # its signature. In this case we just suppress all strict-equality checks to avoid @@ -3094,14 +3094,18 @@ def dangerous_comparison( return False if isinstance(left, Instance) and isinstance(right, Instance): # Special case some builtin implementations of AbstractSet. + left_name = left.type.fullname + right_name = right.type.fullname if ( - left.type.fullname in OVERLAPPING_TYPES_ALLOWLIST - and right.type.fullname in OVERLAPPING_TYPES_ALLOWLIST + left_name in OVERLAPPING_TYPES_ALLOWLIST + and right_name in OVERLAPPING_TYPES_ALLOWLIST ): abstract_set = self.chk.lookup_typeinfo("typing.AbstractSet") left = map_instance_to_supertype(left, abstract_set) right = map_instance_to_supertype(right, abstract_set) - return not is_overlapping_types(left.args[0], right.args[0]) + return self.dangerous_comparison(left.args[0], right.args[0]) + elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name: + return self.dangerous_comparison(left.args[0], right.args[0]) if isinstance(left, LiteralType) and isinstance(right, LiteralType): if isinstance(left.value, bool) and isinstance(right.value, bool): # Comparing different booleans is not dangerous. diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 78ef78e9ad98..20ccbb17d5d5 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1985,6 +1985,24 @@ class B: A() == B() # E: Unsupported operand types for == ("A" and "B") [builtins fixtures/bool.pyi] +[case testStrictEqualitySequenceAndCustomEq] +# flags: --strict-equality +from typing import Tuple + +class C: pass +class D: + def __eq__(self, other): return True + +a = [C()] +b = [D()] +a == b +b == a +t1: Tuple[C, ...] +t2: Tuple[D, ...] +t1 == t2 +t2 == t1 +[builtins fixtures/bool.pyi] + [case testCustomEqCheckStrictEqualityOKInstance] # flags: --strict-equality class A: diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi index 245526d78907..c48efcbd7269 100644 --- a/test-data/unit/fixtures/bool.pyi +++ b/test-data/unit/fixtures/bool.pyi @@ -16,5 +16,5 @@ class float: pass class str: pass class unicode: pass class ellipsis: pass -class list: pass +class list(Generic[T]): pass class property: pass diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi index 9852bbc9fcc6..d397d4f54af2 100644 --- a/test-data/unit/fixtures/set.pyi +++ b/test-data/unit/fixtures/set.pyi @@ -6,6 +6,7 @@ T = TypeVar('T') class object: def __init__(self) -> None: pass + def __eq__(self, other): pass class type: pass class tuple(Generic[T]): pass From eee3a2a9fdfa1cce7f031ef362570ab3ba5d9790 Mon Sep 17 00:00:00 2001 From: Lakshay Bisht Date: Tue, 24 Jan 2023 18:51:16 +0530 Subject: [PATCH 205/292] Remove references to unicode under test-data/unit (#14515) Fixes #14512. 
--- test-data/unit/fixtures/attr.pyi | 1 - test-data/unit/fixtures/bool.pyi | 1 - test-data/unit/fixtures/dict.pyi | 1 - test-data/unit/fixtures/exception.pyi | 1 - test-data/unit/fixtures/ops.pyi | 2 -- test-data/unit/fixtures/staticmethod.pyi | 1 - test-data/unit/fixtures/tuple.pyi | 1 - test-data/unit/fixtures/type.pyi | 1 - test-data/unit/lib-stub/__builtin__.pyi | 1 - 9 files changed, 10 deletions(-) diff --git a/test-data/unit/fixtures/attr.pyi b/test-data/unit/fixtures/attr.pyi index c209abfef0d9..3ac535c21108 100644 --- a/test-data/unit/fixtures/attr.pyi +++ b/test-data/unit/fixtures/attr.pyi @@ -23,6 +23,5 @@ class complex: def __init__(self, real: str = ...) -> None: ... class str: pass -class unicode: pass class ellipsis: pass class tuple: pass diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi index c48efcbd7269..0f6e1a174c7b 100644 --- a/test-data/unit/fixtures/bool.pyi +++ b/test-data/unit/fixtures/bool.pyi @@ -14,7 +14,6 @@ class int: pass class bool(int): pass class float: pass class str: pass -class unicode: pass class ellipsis: pass class list(Generic[T]): pass class property: pass diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index 856b8b7266c1..153832411f50 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -41,7 +41,6 @@ class int: # for convenience imag: int class str: pass # for keyword argument key type -class unicode: pass # needed for py2 docstrings class bytes: pass class list(Sequence[T]): # needed by some test cases diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi index 1c88723e7191..70e3b19c4149 100644 --- a/test-data/unit/fixtures/exception.pyi +++ b/test-data/unit/fixtures/exception.pyi @@ -11,7 +11,6 @@ class tuple(Generic[T]): class function: pass class int: pass class str: pass -class unicode: pass class bool: pass class ellipsis: pass diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi index d5845aba43c6..2b29414448cf 100644 --- a/test-data/unit/fixtures/ops.pyi +++ b/test-data/unit/fixtures/ops.pyi @@ -33,8 +33,6 @@ class str: def startswith(self, x: 'str') -> bool: pass def strip(self) -> 'str': pass -class unicode: pass - class int: def __add__(self, x: 'int') -> 'int': pass def __radd__(self, x: 'int') -> 'int': pass diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi index 7d5d98634e48..08fbda8ccf8f 100644 --- a/test-data/unit/fixtures/staticmethod.pyi +++ b/test-data/unit/fixtures/staticmethod.pyi @@ -16,6 +16,5 @@ class int: def from_bytes(bytes: bytes, byteorder: str) -> int: pass class str: pass -class unicode: pass class bytes: pass class ellipsis: pass diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index 14e668375175..60e47dd02220 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -37,7 +37,6 @@ class bool(int): pass class str: pass # For convenience class bytes: pass class bytearray: pass -class unicode: pass class list(Sequence[T], Generic[T]): @overload diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi index 33dfb5475efa..39357a693638 100644 --- a/test-data/unit/fixtures/type.pyi +++ b/test-data/unit/fixtures/type.pyi @@ -24,5 +24,4 @@ class function: pass class bool: pass class int: pass class str: pass -class unicode: pass class ellipsis: pass diff --git a/test-data/unit/lib-stub/__builtin__.pyi 
b/test-data/unit/lib-stub/__builtin__.pyi index e7109a179aac..f9ee7b74011d 100644 --- a/test-data/unit/lib-stub/__builtin__.pyi +++ b/test-data/unit/lib-stub/__builtin__.pyi @@ -18,7 +18,6 @@ class int: pass class float: pass class str: pass -class unicode: pass class tuple(Generic[_T]): pass class function: pass From 8b309132188ebbcb889300f9f2fc9bc47df3a3bd Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Tue, 24 Jan 2023 07:27:57 -0800 Subject: [PATCH 206/292] [used before def] rework builtin handling (#14483) When doing multiple passes, in the example below, `range` will refer to current's module range. When doing a single pass, `range` will refer to `builtins.range`: ```python _range = range _C = C # error: Name "C" is used before definition class range: pass class C: pass ``` Instead of looking at the output of semanal to check if a variable is resolving to a `builtins` package, we can just check if it's part of builtins module. Fixes #14476. --- mypy/build.py | 5 ++++- mypy/partially_defined.py | 20 +++++++++++++------- test-data/unit/check-possibly-undefined.test | 10 ++++++++++ 3 files changed, 27 insertions(+), 8 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 1747c4518c63..a4817d1866c7 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2359,7 +2359,10 @@ def detect_possibly_undefined_vars(self) -> None: ) or manager.errors.is_error_code_enabled(codes.USED_BEFORE_DEF): self.tree.accept( PossiblyUndefinedVariableVisitor( - MessageBuilder(manager.errors, manager.modules), self.type_map(), self.options + MessageBuilder(manager.errors, manager.modules), + self.type_map(), + self.options, + self.tree.names, ) ) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 9a58df04371f..af09493c9cae 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -27,12 +27,13 @@ ListExpr, Lvalue, MatchStmt, + MypyFile, NameExpr, NonlocalDecl, RaiseStmt, - RefExpr, ReturnStmt, StarExpr, + SymbolTable, TryStmt, TupleExpr, WhileStmt, @@ -286,10 +287,6 @@ def is_undefined(self, name: str) -> bool: return self._scope().branch_stmts[-1].is_undefined(name) -def refers_to_builtin(o: RefExpr) -> bool: - return o.fullname.startswith("builtins.") - - class Loop: def __init__(self) -> None: self.has_break = False @@ -314,11 +311,20 @@ class PossiblyUndefinedVariableVisitor(ExtendedTraverserVisitor): """ def __init__( - self, msg: MessageBuilder, type_map: dict[Expression, Type], options: Options + self, + msg: MessageBuilder, + type_map: dict[Expression, Type], + options: Options, + names: SymbolTable, ) -> None: self.msg = msg self.type_map = type_map self.options = options + self.builtins = SymbolTable() + builtins_mod = names.get("__builtins__", None) + if builtins_mod: + assert isinstance(builtins_mod.node, MypyFile) + self.builtins = builtins_mod.node.names self.loops: list[Loop] = [] self.try_depth = 0 self.tracker = DefinedVariableTracker() @@ -597,7 +603,7 @@ def visit_starred_pattern(self, o: StarredPattern) -> None: super().visit_starred_pattern(o) def visit_name_expr(self, o: NameExpr) -> None: - if refers_to_builtin(o): + if o.name in self.builtins: return if self.tracker.is_possibly_undefined(o.name): # A variable is only defined in some branches. 
diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test index 802635c30b35..29c4868e97af 100644 --- a/test-data/unit/check-possibly-undefined.test +++ b/test-data/unit/check-possibly-undefined.test @@ -909,6 +909,16 @@ def f0() -> None: type = "abc" a = type +[case testUsedBeforeDefBuiltinsMultipass] +# flags: --enable-error-code used-before-def + +# When doing multiple passes, mypy resolves references slightly differently. +# In this case, it would refer the earlier `type` call to the range class defined below. +_type = type # No error +_C = C # E: Name "C" is used before definition +class type: pass +class C: pass + [case testUsedBeforeDefImplicitModuleAttrs] # flags: --enable-error-code used-before-def a = __name__ # No error. From 757e0d4894d68b43810d5723ad496c268a850468 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 24 Jan 2023 16:40:24 +0000 Subject: [PATCH 207/292] [mypyc] Support inheriting native int attributes from traits (#14490) Regular trait attributes are accessed via looking up the field offset from a vtable. This doesn't work with native ints, since they may also require access to a defined attributes bitmap, and also looking that up from a vtable would be too complicated. Work around this may always accessing trait native int attributes using accessor methods. These can raise an exception if an attribute is undefined. Add empty accessor methods in traits for each attribute with overlapping error values. Also synthesize real accessors in each concrete subclass. When accessing the attribute using a concrete subclass, still prefer direct field and bitmap access. Only attribute access through a trait type requires accessors to be used. Work on mypyc/mypyc#837. --- mypyc/analysis/attrdefined.py | 9 +++++ mypyc/codegen/emitfunc.py | 3 +- mypyc/ir/class_ir.py | 7 ++-- mypyc/irbuild/classdef.py | 6 ++-- mypyc/irbuild/function.py | 18 +++++++--- mypyc/irbuild/prepare.py | 23 ++++++++++++- mypyc/irbuild/vtable.py | 4 +-- mypyc/test-data/run-i64.test | 62 +++++++++++++++++++++++++++++++++++ 8 files changed, 119 insertions(+), 13 deletions(-) diff --git a/mypyc/analysis/attrdefined.py b/mypyc/analysis/attrdefined.py index 1368b7f5315f..02e02a82a4f9 100644 --- a/mypyc/analysis/attrdefined.py +++ b/mypyc/analysis/attrdefined.py @@ -415,6 +415,9 @@ def update_always_defined_attrs_using_subclasses(cl: ClassIR, seen: set[ClassIR] def detect_undefined_bitmap(cl: ClassIR, seen: Set[ClassIR]) -> None: + if cl.is_trait: + return + if cl in seen: return seen.add(cl) @@ -426,3 +429,9 @@ def detect_undefined_bitmap(cl: ClassIR, seen: Set[ClassIR]) -> None: for n, t in cl.attributes.items(): if t.error_overlap and not cl.is_always_defined(n): cl.bitmap_attrs.append(n) + + for base in cl.mro[1:]: + if base.is_trait: + for n, t in base.attributes.items(): + if t.error_overlap and not cl.is_always_defined(n) and n not in cl.bitmap_attrs: + cl.bitmap_attrs.append(n) diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 534c4d1f20ea..56a22447eeac 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -330,7 +330,8 @@ def visit_get_attr(self, op: GetAttr) -> None: rtype = op.class_type cl = rtype.class_ir attr_rtype, decl_cl = cl.attr_details(op.attr) - if cl.get_method(op.attr): + prefer_method = cl.is_trait and attr_rtype.error_overlap + if cl.get_method(op.attr, prefer_method=prefer_method): # Properties are essentially methods, so use vtable access for them. 
version = "_TRAIT" if cl.is_trait else "" self.emit_line( diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index 9b73eea3f8e6..a1534780b79b 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -294,10 +294,13 @@ def get_method_and_class( return None - def get_method(self, name: str) -> FuncIR | None: - res = self.get_method_and_class(name) + def get_method(self, name: str, *, prefer_method: bool = False) -> FuncIR | None: + res = self.get_method_and_class(name, prefer_method=prefer_method) return res[0] if res else None + def has_method_decl(self, name: str) -> bool: + return any(name in ir.method_decls for ir in self.mro) + def subclasses(self) -> set[ClassIR] | None: """Return all subclasses of this class, both direct and indirect. diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 4e4263458b3e..59b1c05a0ddb 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -161,14 +161,16 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: # Generate implicit property setters/getters for name, decl in ir.method_decls.items(): if decl.implicit and decl.is_prop_getter: - getter_ir = gen_property_getter_ir(builder, decl, cdef) + getter_ir = gen_property_getter_ir(builder, decl, cdef, ir.is_trait) builder.functions.append(getter_ir) ir.methods[getter_ir.decl.name] = getter_ir setter_ir = None setter_name = PROPSET_PREFIX + name if setter_name in ir.method_decls: - setter_ir = gen_property_setter_ir(builder, ir.method_decls[setter_name], cdef) + setter_ir = gen_property_setter_ir( + builder, ir.method_decls[setter_name], cdef, ir.is_trait + ) builder.functions.append(setter_ir) ir.methods[setter_name] = setter_ir diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index 523f8c299c2f..5262b74e2853 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -1028,7 +1028,9 @@ def get_native_impl_ids(builder: IRBuilder, singledispatch_func: FuncDef) -> dic return {impl: i for i, (typ, impl) in enumerate(impls) if not is_decorated(builder, impl)} -def gen_property_getter_ir(builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef) -> FuncIR: +def gen_property_getter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> FuncIR: """Generate an implicit trivial property getter for an attribute. These are used if an attribute can also be accessed as a property. @@ -1036,13 +1038,18 @@ def gen_property_getter_ir(builder: IRBuilder, func_decl: FuncDecl, cdef: ClassD name = func_decl.name builder.enter(name) self_reg = builder.add_argument("self", func_decl.sig.args[0].type) - value = builder.builder.get_attr(self_reg, name, func_decl.sig.ret_type, -1) - builder.add(Return(value)) + if not is_trait: + value = builder.builder.get_attr(self_reg, name, func_decl.sig.ret_type, -1) + builder.add(Return(value)) + else: + builder.add(Unreachable()) args, _, blocks, ret_type, fn_info = builder.leave() return FuncIR(func_decl, args, blocks) -def gen_property_setter_ir(builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef) -> FuncIR: +def gen_property_setter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> FuncIR: """Generate an implicit trivial property setter for an attribute. These are used if an attribute can also be accessed as a property. 
@@ -1053,7 +1060,8 @@ def gen_property_setter_ir(builder: IRBuilder, func_decl: FuncDecl, cdef: ClassD value_reg = builder.add_argument("value", func_decl.sig.args[1].type) assert name.startswith(PROPSET_PREFIX) attr_name = name[len(PROPSET_PREFIX) :] - builder.add(SetAttr(self_reg, attr_name, value_reg, -1)) + if not is_trait: + builder.add(SetAttr(self_reg, attr_name, value_reg, -1)) builder.add(Return(builder.none())) args, _, blocks, ret_type, fn_info = builder.leave() return FuncIR(func_decl, args, blocks) diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index eb8288b84818..3c519c3d1c33 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -305,7 +305,15 @@ def prepare_methods_and_attributes( if isinstance(node.node, Var): assert node.node.type, "Class member %s missing type" % name if not node.node.is_classvar and name not in ("__slots__", "__deletable__"): - ir.attributes[name] = mapper.type_to_rtype(node.node.type) + attr_rtype = mapper.type_to_rtype(node.node.type) + if ir.is_trait and attr_rtype.error_overlap: + # Traits don't have attribute definedness bitmaps, so use + # property accessor methods to access attributes that need them. + # We will generate accessor implementations that use the class bitmap + # for any concrete subclasses. + add_getter_declaration(ir, name, attr_rtype, module_name) + add_setter_declaration(ir, name, attr_rtype, module_name) + ir.attributes[name] = attr_rtype elif isinstance(node.node, (FuncDef, Decorator)): prepare_method_def(ir, module_name, cdef, mapper, node.node) elif isinstance(node.node, OverloadedFuncDef): @@ -329,11 +337,20 @@ def prepare_methods_and_attributes( def prepare_implicit_property_accessors( info: TypeInfo, ir: ClassIR, module_name: str, mapper: Mapper ) -> None: + concrete_attributes = set() for base in ir.base_mro: for name, attr_rtype in base.attributes.items(): + concrete_attributes.add(name) add_property_methods_for_attribute_if_needed( info, ir, name, attr_rtype, module_name, mapper ) + for base in ir.mro[1:]: + if base.is_trait: + for name, attr_rtype in base.attributes.items(): + if name not in concrete_attributes: + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) def add_property_methods_for_attribute_if_needed( @@ -350,6 +367,7 @@ def add_property_methods_for_attribute_if_needed( """ for base in info.mro[1:]: if base in mapper.type_to_ir: + base_ir = mapper.type_to_ir[base] n = base.names.get(attr_name) if n is None: continue @@ -361,6 +379,9 @@ def add_property_methods_for_attribute_if_needed( # Defined as a read-write property in base class/trait add_getter_declaration(ir, attr_name, attr_rtype, module_name) add_setter_declaration(ir, attr_name, attr_rtype, module_name) + elif base_ir.is_trait and attr_rtype.error_overlap: + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) def add_getter_declaration( diff --git a/mypyc/irbuild/vtable.py b/mypyc/irbuild/vtable.py index 13bc4d46e15d..2d4f7261e4ca 100644 --- a/mypyc/irbuild/vtable.py +++ b/mypyc/irbuild/vtable.py @@ -40,7 +40,7 @@ def compute_vtable(cls: ClassIR) -> None: for t in [cls] + cls.traits: for fn in itertools.chain(t.methods.values()): # TODO: don't generate a new entry when we overload without changing the type - if fn == cls.get_method(fn.name): + if fn == cls.get_method(fn.name, prefer_method=True): cls.vtable[fn.name] = len(entries) # If the class contains a glue method referring to itself, 
that is a # shadow glue method to support interpreted subclasses. @@ -60,7 +60,7 @@ def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries: for entry in parent.vtable_entries: # Find the original method corresponding to this vtable entry. # (This may not be the method in the entry, if it was overridden.) - orig_parent_method = entry.cls.get_method(entry.name) + orig_parent_method = entry.cls.get_method(entry.name, prefer_method=True) assert orig_parent_method method_cls = cls.get_method_and_class(entry.name, prefer_method=True) if method_cls: diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index 0fc4b91330d4..d0f0fed4aabe 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -1433,3 +1433,65 @@ def test_read_only_property_in_trait_implemented_as_property() -> None: assert t.x == 6 with assertRaises(TypeError): t.y + +@trait +class T2: + x: i64 + y: i64 + +class C2(T2): + pass + +def test_inherit_trait_attribute() -> None: + c = C2() + c.x = 5 + assert c.x == 5 + c.x = MAGIC + assert c.x == MAGIC + with assertRaises(AttributeError): + c.y + c.y = 6 + assert c.y == 6 + t: T2 = C2() + with assertRaises(AttributeError): + t.y + t = c + assert t.x == MAGIC + c.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = c + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class D2(T2): + x: i64 + y: i64 = 4 + +def test_implement_trait_attribute() -> None: + d = D2() + d.x = 5 + assert d.x == 5 + d.x = MAGIC + assert d.x == MAGIC + assert d.y == 4 + d.y = 6 + assert d.y == 6 + t: T2 = D2() + assert t.y == 4 + t = d + assert t.x == MAGIC + d.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = d + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 From 0665ce924290dad3f30010b3bb93310a71c8db81 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 24 Jan 2023 17:46:17 +0000 Subject: [PATCH 208/292] Fix strict equality with enum type with custom __eq__ (#14518) Fixes regression introduced in #14513. --- mypy/checkexpr.py | 31 ++++++++++++++++++--------- test-data/unit/check-expressions.test | 26 ++++++++++++++++++++++ 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 8dea7d0e8551..e19d48f4f5e7 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2970,7 +2970,7 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: not local_errors.has_new_errors() and cont_type and self.dangerous_comparison( - left_type, cont_type, original_container=right_type + left_type, cont_type, original_container=right_type, prefer_literal=False ) ): self.msg.dangerous_comparison(left_type, cont_type, "container", e) @@ -2988,21 +2988,19 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: # testCustomEqCheckStrictEquality for an example. if not w.has_new_errors() and operator in ("==", "!="): right_type = self.accept(right) - # Also flag non-overlapping literals in situations like: - # x: Literal['a', 'b'] - # if x == 'c': - # ... 
- left_type = try_getting_literal(left_type) - right_type = try_getting_literal(right_type) if self.dangerous_comparison(left_type, right_type): + # Show the most specific literal types possible + left_type = try_getting_literal(left_type) + right_type = try_getting_literal(right_type) self.msg.dangerous_comparison(left_type, right_type, "equality", e) elif operator == "is" or operator == "is not": right_type = self.accept(right) # validate the right operand sub_result = self.bool_type() - left_type = try_getting_literal(left_type) - right_type = try_getting_literal(right_type) if self.dangerous_comparison(left_type, right_type): + # Show the most specific literal types possible + left_type = try_getting_literal(left_type) + right_type = try_getting_literal(right_type) self.msg.dangerous_comparison(left_type, right_type, "identity", e) method_type = None else: @@ -3036,7 +3034,12 @@ def find_partial_type_ref_fast_path(self, expr: Expression) -> Type | None: return None def dangerous_comparison( - self, left: Type, right: Type, original_container: Type | None = None + self, + left: Type, + right: Type, + original_container: Type | None = None, + *, + prefer_literal: bool = True, ) -> bool: """Check for dangerous non-overlapping comparisons like 42 == 'no'. @@ -3064,6 +3067,14 @@ def dangerous_comparison( if custom_special_method(left, "__eq__") or custom_special_method(right, "__eq__"): return False + if prefer_literal: + # Also flag non-overlapping literals in situations like: + # x: Literal['a', 'b'] + # if x == 'c': + # ... + left = try_getting_literal(left) + right = try_getting_literal(right) + if self.chk.binder.is_unreachable_warning_suppressed(): # We are inside a function that contains type variables with value restrictions in # its signature. In this case we just suppress all strict-equality checks to avoid diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 20ccbb17d5d5..49a3f0d4aaa7 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -2221,6 +2221,32 @@ int == y y == int [builtins fixtures/bool.pyi] +[case testStrictEqualityAndEnumWithCustomEq] +# flags: --strict-equality +from enum import Enum + +class E1(Enum): + X = 0 + Y = 1 + +class E2(Enum): + X = 0 + Y = 1 + + def __eq__(self, other: object) -> bool: + return bool() + +E1.X == E1.Y # E: Non-overlapping equality check (left operand type: "Literal[E1.X]", right operand type: "Literal[E1.Y]") +E2.X == E2.Y +[builtins fixtures/bool.pyi] + +[case testStrictEqualityWithBytesContains] +# flags: --strict-equality +data = b"xy" +b"x" in data +[builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] + [case testUnimportedHintAny] def f(x: Any) -> None: # E: Name "Any" is not defined \ # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any") From 6442b02400ac6e6715247d29ea2d3da8ca8e35d8 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Thu, 26 Jan 2023 13:20:29 -0800 Subject: [PATCH 209/292] Adjust SCC setup to enable earlier collections.abc import in typeshed (#14088) Fixes #11860 (?) Typeshed is currently unable to import Sequence, MutableSequence, or ByteString from collections.abc within builtins.pyi. It seems this is because: 1. In order to analyze `collections.abc`, we first need to analyze `collections`. 2. Since `collections` is a package containing an `__init__.pyi` file, the `add_implicit_module_attrs` function will try adding the `__path__` variable to the symboltable. 3. 
The `__path__` variable has type `builtins.str`. But str is a subclass of Sequence, which we have not analyzed yet since we're still in the middle of analyzing `collections` and `collections.abc`. This diff tries repairing this by: 1. Adding `_collections_abc` and `collections.abc` to the set of special-cased core modules we deliberately process early. 2. Modifying `add_implicit_module_attrs` so it does the same trick we do for the `__doc__` symbol and fall back to using an UnboundType if `builtins.str` is not defined yet. To be 100% honest, I'm not really sold on this PR for a few reasons: - I was able to test these changes manually, but wasn't sure how to write tests for them. - We have 3-4 subtly different lists of "core modules" scattered throughout mypy. For example, see `CORE_BUILTIN_MODULES` in mypy/build.py or try grepping for the string `"typing"` in the mypy dir. Arguably, we should defer landing this PR until we've had a chance to consolidate these lists and confirm there are no additional places where we need to special-case `_collections_abc`, `collections`, and `collections.abc`. - PEP 585 attempted to declare that we should one day remove entries like Sequence from `typing` module, but this realistically doesn't seem ever achievable given that (a) it would break backwards compat and (b) there doesn't seem to be any incentives for users to proactively switch. In that case, is there any pressing reason to change typeshed? Regardless, this is a crash and my goal atm is to de-crash mypy, so I'm throwing this over the wall. --- mypy/semanal.py | 9 ++++++--- mypy/semanal_main.py | 9 ++++++++- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 34cb45194d19..45cfb5b13847 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -615,9 +615,12 @@ def refresh_top_level(self, file_node: MypyFile) -> None: def add_implicit_module_attrs(self, file_node: MypyFile) -> None: """Manually add implicit definitions of module '__name__' etc.""" + str_type: Type | None = self.named_type_or_none("builtins.str") + if str_type is None: + str_type = UnboundType("builtins.str") for name, t in implicit_module_attrs.items(): if name == "__doc__": - typ: Type = UnboundType("__builtins__.str") + typ: Type = str_type elif name == "__path__": if not file_node.is_package_init_file(): continue @@ -630,7 +633,7 @@ def add_implicit_module_attrs(self, file_node: MypyFile) -> None: if not isinstance(node, TypeInfo): self.defer(node) return - typ = Instance(node, [self.str_type()]) + typ = Instance(node, [str_type]) elif name == "__annotations__": sym = self.lookup_qualified("__builtins__.dict", Context(), suppress_errors=True) if not sym: @@ -639,7 +642,7 @@ def add_implicit_module_attrs(self, file_node: MypyFile) -> None: if not isinstance(node, TypeInfo): self.defer(node) return - typ = Instance(node, [self.str_type(), AnyType(TypeOfAny.special_form)]) + typ = Instance(node, [str_type, AnyType(TypeOfAny.special_form)]) else: assert t is not None, f"type should be specified for {name}" typ = UnboundType(t) diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 9e3aeaa7fa4b..31bcdc2b703d 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -66,7 +66,14 @@ # Number of passes over core modules before going on to the rest of the builtin SCC. 
CORE_WARMUP: Final = 2 -core_modules: Final = ["typing", "builtins", "abc", "collections"] +core_modules: Final = [ + "typing", + "_collections_abc", + "builtins", + "abc", + "collections", + "collections.abc", +] def semantic_analysis_for_scc(graph: Graph, scc: list[str], errors: Errors) -> None: From bac9e77eaeb36c0535bc05cb1faf6eced25b8af1 Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Thu, 26 Jan 2023 23:21:40 +0000 Subject: [PATCH 210/292] [dataclass_transform] minimal implementation of dataclass_transform (#14523) This is a very simple first step to implementing [PEP 0681](https://peps.python.org/pep-0681/#decorator-function-example), which will allow MyPy to recognize user-defined types that behave similarly to dataclasses. This initial implementation is very limited: we only support decorator-style use of `typing.dataclass_transform` and do not support passing additional options to the transform (such as `freeze` or `init`). Within MyPy, we add a new `is_dataclass_transform` field to `FuncBase` which is populated during semantic analysis. When we check for plugin hooks later, we add new special cases to use the existing dataclasses plugin if a class decorator is marked with `is_dataclass_transform`. Ideally we would use a proper plugin API; the hacky special case here can be replaced in subsequent iterations. Co-authored-by: Wesley Wright --- mypy/nodes.py | 10 +++- mypy/plugins/common.py | 2 +- mypy/semanal.py | 23 ++++++++-- mypy/semanal_main.py | 14 +++++- mypy/semanal_shared.py | 5 ++ mypy/types.py | 5 ++ test-data/unit/check-dataclass-transform.test | 46 +++++++++++++++++++ test-data/unit/fixtures/dataclasses.pyi | 6 ++- test-data/unit/fixtures/typing-medium.pyi | 2 + test-data/unit/lib-stub/typing_extensions.pyi | 2 + 10 files changed, 106 insertions(+), 9 deletions(-) create mode 100644 test-data/unit/check-dataclass-transform.test diff --git a/mypy/nodes.py b/mypy/nodes.py index 38639d553b3d..98976f4fe56a 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -480,7 +480,13 @@ def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_all(self) -FUNCBASE_FLAGS: Final = ["is_property", "is_class", "is_static", "is_final"] +FUNCBASE_FLAGS: Final = [ + "is_property", + "is_class", + "is_static", + "is_final", + "is_dataclass_transform", +] class FuncBase(Node): @@ -506,6 +512,7 @@ class FuncBase(Node): "is_static", # Uses "@staticmethod" "is_final", # Uses "@final" "_fullname", + "is_dataclass_transform", # Is decorated with "@typing.dataclass_transform" or similar ) def __init__(self) -> None: @@ -524,6 +531,7 @@ def __init__(self) -> None: self.is_final = False # Name with module prefix self._fullname = "" + self.is_dataclass_transform = False @property @abstractmethod diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 07cd5dc7de7f..a2a38f256da3 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -19,7 +19,7 @@ Var, ) from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface -from mypy.semanal import ALLOW_INCOMPATIBLE_OVERRIDE, set_callable_name +from mypy.semanal_shared import ALLOW_INCOMPATIBLE_OVERRIDE, set_callable_name from mypy.typeops import ( # noqa: F401 # Part of public API try_getting_str_literals as try_getting_str_literals, ) diff --git a/mypy/semanal.py b/mypy/semanal.py index 45cfb5b13847..15566c9396c6 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -194,6 +194,7 @@ Plugin, SemanticAnalyzerPluginInterface, ) +from mypy.plugins import dataclasses as 
dataclasses_plugin from mypy.reachability import ( ALWAYS_FALSE, ALWAYS_TRUE, @@ -208,6 +209,7 @@ from mypy.semanal_namedtuple import NamedTupleAnalyzer from mypy.semanal_newtype import NewTypeAnalyzer from mypy.semanal_shared import ( + ALLOW_INCOMPATIBLE_OVERRIDE, PRIORITY_FALLBACKS, SemanticAnalyzerInterface, calculate_tuple_fallback, @@ -234,6 +236,7 @@ from mypy.typeops import function_type, get_type_vars from mypy.types import ( ASSERT_TYPE_NAMES, + DATACLASS_TRANSFORM_NAMES, FINAL_DECORATOR_NAMES, FINAL_TYPE_NAMES, NEVER_NAMES, @@ -304,10 +307,6 @@ # available very early on. CORE_BUILTIN_CLASSES: Final = ["object", "bool", "function"] -# Subclasses can override these Var attributes with incompatible types. This can also be -# set for individual attributes using 'allow_incompatible_override' of Var. -ALLOW_INCOMPATIBLE_OVERRIDE: Final = ("__slots__", "__deletable__", "__match_args__") - # Used for tracking incomplete references Tag: _TypeAlias = int @@ -1508,6 +1507,10 @@ def visit_decorator(self, dec: Decorator) -> None: removed.append(i) else: self.fail("@final cannot be used with non-method functions", d) + elif isinstance(d, CallExpr) and refers_to_fullname( + d.callee, DATACLASS_TRANSFORM_NAMES + ): + dec.func.is_dataclass_transform = True elif not dec.var.is_property: # We have seen a "non-trivial" decorator before seeing @property, if # we will see a @property later, give an error, as we don't support this. @@ -1709,6 +1712,11 @@ def apply_class_plugin_hooks(self, defn: ClassDef) -> None: decorator_name = self.get_fullname_for_hook(decorator) if decorator_name: hook = self.plugin.get_class_decorator_hook(decorator_name) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and is_dataclass_transform_decorator(decorator): + hook = dataclasses_plugin.dataclass_tag_callback if hook: hook(ClassDefContext(defn, decorator, self)) @@ -6599,3 +6607,10 @@ def halt(self, reason: str = ...) 
-> NoReturn: return isinstance(stmt, PassStmt) or ( isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) ) + + +def is_dataclass_transform_decorator(node: Node | None) -> bool: + if isinstance(node, RefExpr): + return is_dataclass_transform_decorator(node.node) + + return isinstance(node, Decorator) and node.func.is_dataclass_transform diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 31bcdc2b703d..d2dd0e32398d 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -37,9 +37,11 @@ from mypy.nodes import Decorator, FuncDef, MypyFile, OverloadedFuncDef, TypeInfo, Var from mypy.options import Options from mypy.plugin import ClassDefContext +from mypy.plugins import dataclasses as dataclasses_plugin from mypy.semanal import ( SemanticAnalyzer, apply_semantic_analyzer_patches, + is_dataclass_transform_decorator, remove_imported_names_from_symtable, ) from mypy.semanal_classprop import ( @@ -457,11 +459,19 @@ def apply_hooks_to_class( ok = True for decorator in defn.decorators: with self.file_context(file_node, options, info): + hook = None + decorator_name = self.get_fullname_for_hook(decorator) if decorator_name: hook = self.plugin.get_class_decorator_hook_2(decorator_name) - if hook: - ok = ok and hook(ClassDefContext(defn, decorator, self)) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and is_dataclass_transform_decorator(decorator): + hook = dataclasses_plugin.dataclass_class_maker_callback + + if hook: + ok = ok and hook(ClassDefContext(defn, decorator, self)) return ok diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index f4bc173b52d5..11c4af314a3b 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -38,6 +38,11 @@ get_proper_type, ) +# Subclasses can override these Var attributes with incompatible types. This can also be +# set for individual attributes using 'allow_incompatible_override' of Var. +ALLOW_INCOMPATIBLE_OVERRIDE: Final = ("__slots__", "__deletable__", "__match_args__") + + # Priorities for ordering of patches within the "patch" phase of semantic analysis # (after the main pass): diff --git a/mypy/types.py b/mypy/types.py index bf610a01b63b..74656cc270f3 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -150,6 +150,11 @@ "typing_extensions.Never", ) +DATACLASS_TRANSFORM_NAMES: Final = ( + "typing.dataclass_transform", + "typing_extensions.dataclass_transform", +) + # A placeholder used for Bogus[...] parameters _dummy: Final[Any] = object() diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test new file mode 100644 index 000000000000..4f907e3186b6 --- /dev/null +++ b/test-data/unit/check-dataclass-transform.test @@ -0,0 +1,46 @@ +[case testDataclassTransformReusesDataclassLogic] +# flags: --python-version 3.7 +from typing import dataclass_transform, Type + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Person: + name: str + age: int + + def summary(self): + return "%s is %d years old." 
% (self.name, self.age) + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('Jonh', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-medium.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformIsFoundInTypingExtensions] +# flags: --python-version 3.7 +from typing import Type +from typing_extensions import dataclass_transform + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Person: + name: str + age: int + + def summary(self): + return "%s is %d years old." % (self.name, self.age) + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('Jonh', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/fixtures/dataclasses.pyi b/test-data/unit/fixtures/dataclasses.pyi index 206843a88b24..7de40af9cfe7 100644 --- a/test-data/unit/fixtures/dataclasses.pyi +++ b/test-data/unit/fixtures/dataclasses.pyi @@ -37,7 +37,11 @@ class dict(Mapping[KT, VT]): def get(self, k: KT, default: Union[KT, _T]) -> Union[VT, _T]: pass def __len__(self) -> int: ... -class list(Generic[_T], Sequence[_T]): pass +class list(Generic[_T], Sequence[_T]): + def __contains__(self, item: object) -> int: pass + def __getitem__(self, key: int) -> _T: pass + def __iter__(self) -> Iterator[_T]: pass + class function: pass class classmethod: pass property = object() diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 863b0703989d..0d0e13468013 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -71,3 +71,5 @@ class ContextManager(Generic[T]): class _SpecialForm: pass TYPE_CHECKING = 1 + +def dataclass_transform() -> Callable[[T], T]: ... diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi index cbf692fc7111..89f7108fe83c 100644 --- a/test-data/unit/lib-stub/typing_extensions.pyi +++ b/test-data/unit/lib-stub/typing_extensions.pyi @@ -57,3 +57,5 @@ class _TypedDict(Mapping[str, object]): def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... def reveal_type(__obj: T) -> T: pass + +def dataclass_transform() -> Callable[[T], T]: ... From e778a58066f23982d5cbe1df5317d6540c5902fd Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 27 Jan 2023 11:17:13 +0000 Subject: [PATCH 211/292] [mypyc] Support type narrowing of native int types using "int" (#14524) Now `isinstance(x, int)` can be used to narrow a union type that includes a native int type. In mypyc unions there is no runtime distinction between different integer types -- everything is represented at runtime as boxed `int` values anyway. Also test narrowing a native int using the same native int type. Work on mypyc/mypyc#837. 
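A minimal sketch of the user-visible effect, mirroring the new `narrow1`/`narrow2` test cases (the standalone function below is illustrative, not part of the change itself):

```python
from typing import Union
from mypy_extensions import i64

def narrow(x: Union[str, i64]) -> i64:
    if isinstance(x, int):
        # narrowed to i64 here: "int" now covers the native int types
        return x
    # narrowed to str here
    return len(x)
```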
--- mypy/checker.py | 6 +-- mypy/meet.py | 4 ++ mypy/semanal_classprop.py | 4 +- mypy/subtypes.py | 22 ++++++---- mypy/types.py | 3 ++ mypyc/test-data/irbuild-i64.test | 64 ++++++++++++++++++++++++++++ mypyc/test-data/run-i64.test | 18 +++++++- test-data/unit/check-native-int.test | 44 +++++++++++++++++++ 8 files changed, 150 insertions(+), 15 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 46200f5813cc..1f635c09bc0a 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -178,6 +178,7 @@ ) from mypy.types import ( ANY_STRATEGY, + MYPYC_NATIVE_INT_NAMES, OVERLOAD_NAMES, AnyType, BoolTypeQuery, @@ -4517,10 +4518,7 @@ def analyze_range_native_int_type(self, expr: Expression) -> Type | None: ok = True for arg in expr.args: argt = get_proper_type(self.lookup_type(arg)) - if isinstance(argt, Instance) and argt.type.fullname in ( - "mypy_extensions.i64", - "mypy_extensions.i32", - ): + if isinstance(argt, Instance) and argt.type.fullname in MYPYC_NATIVE_INT_NAMES: if native_int is None: native_int = argt elif argt != native_int: diff --git a/mypy/meet.py b/mypy/meet.py index 8760b8c6d4fe..1cc125f3bfd6 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -15,6 +15,7 @@ ) from mypy.typeops import is_recursive_pair, make_simplified_union, tuple_fallback from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, AnyType, CallableType, DeletedType, @@ -475,6 +476,9 @@ def _type_object_overlap(left: Type, right: Type) -> bool: ): return True + if right.type.fullname == "builtins.int" and left.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True + # Two unrelated types cannot be partially overlapping: they're disjoint. if left.type.has_base(right.type.fullname): left = map_instance_to_supertype(left, right.type) diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index 5d21babcc597..ead80aed67b6 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -22,7 +22,7 @@ Var, ) from mypy.options import Options -from mypy.types import Instance, ProperType +from mypy.types import MYPYC_NATIVE_INT_NAMES, Instance, ProperType # Hard coded type promotions (shared between all Python versions). # These add extra ad-hoc edges to the subtyping relation. For example, @@ -177,7 +177,7 @@ def add_type_promotion( # Special case the promotions between 'int' and native integer types. # These have promotions going both ways, such as from 'int' to 'i64' # and 'i64' to 'int', for convenience. - if defn.fullname == "mypy_extensions.i64" or defn.fullname == "mypy_extensions.i32": + if defn.fullname in MYPYC_NATIVE_INT_NAMES: int_sym = builtin_names["int"] assert isinstance(int_sym.node, TypeInfo) int_sym.node._promote.append(Instance(defn.info, [])) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 4bf3672af740..9b555480e59b 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -27,6 +27,7 @@ from mypy.options import Options from mypy.state import state from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, TUPLE_LIKE_INSTANCE_NAMES, TYPED_NAMEDTUPLE_NAMES, AnyType, @@ -1793,14 +1794,19 @@ def covers_at_runtime(item: Type, supertype: Type) -> bool: erase_type(item), supertype, ignore_promotions=True, erase_instances=True ): return True - if isinstance(supertype, Instance) and supertype.type.is_protocol: - # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. 
- if is_proper_subtype(item, supertype, ignore_promotions=True): - return True - if isinstance(item, TypedDictType) and isinstance(supertype, Instance): - # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). - if supertype.type.fullname == "builtins.dict": - return True + if isinstance(supertype, Instance): + if supertype.type.is_protocol: + # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. + if is_proper_subtype(item, supertype, ignore_promotions=True): + return True + if isinstance(item, TypedDictType): + # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). + if supertype.type.fullname == "builtins.dict": + return True + elif isinstance(item, Instance) and supertype.type.fullname == "builtins.int": + # "int" covers all native int types + if item.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True # TODO: Add more special cases. return False diff --git a/mypy/types.py b/mypy/types.py index 74656cc270f3..0244f57847c5 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -150,6 +150,9 @@ "typing_extensions.Never", ) +# Mypyc fixed-width native int types (compatible with builtins.int) +MYPYC_NATIVE_INT_NAMES: Final = ("mypy_extensions.i64", "mypy_extensions.i32") + DATACLASS_TRANSFORM_NAMES: Final = ( "typing.dataclass_transform", "typing_extensions.dataclass_transform", diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index 47802d8e0c97..6b8dd357421f 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1834,3 +1834,67 @@ L0: r0 = CPyLong_FromFloat(x) r1 = unbox(int64, r0) return r1 + +[case testI64IsinstanceNarrowing] +from typing import Union +from mypy_extensions import i64 + +class C: + a: i64 + +def narrow1(x: Union[C, i64]) -> i64: + if isinstance(x, i64): + return x + return x.a + +def narrow2(x: Union[C, i64]) -> i64: + if isinstance(x, int): + return x + return x.a +[out] +def narrow1(x): + x :: union[__main__.C, int64] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: int64 + r5 :: __main__.C + r6 :: int64 +L0: + r0 = load_address PyLong_Type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = unbox(int64, x) + return r4 +L2: + r5 = borrow cast(__main__.C, x) + r6 = r5.a + keep_alive x + return r6 +def narrow2(x): + x :: union[__main__.C, int64] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: int64 + r5 :: __main__.C + r6 :: int64 +L0: + r0 = load_address PyLong_Type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = unbox(int64, x) + return r4 +L2: + r5 = borrow cast(__main__.C, x) + r6 = r5.a + keep_alive x + return r6 diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index d0f0fed4aabe..ea94741dbd51 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -1,5 +1,5 @@ [case testI64BasicOps] -from typing import List, Any, Tuple +from typing import List, Any, Tuple, Union MYPY = False if MYPY: @@ -497,6 +497,22 @@ def test_for_loop() -> None: assert n == 9 assert sum([x * x for x in range(i64(4 + int()))]) == 1 + 4 + 9 +def narrow1(x: Union[str, i64]) -> i64: + if isinstance(x, i64): + return x + return len(x) + +def narrow2(x: Union[str, i64]) -> i64: + if isinstance(x, int): + return x + return len(x) + +def test_isinstance() -> None: + assert 
narrow1(123) == 123 + assert narrow1("foobar") == 6 + assert narrow2(123) == 123 + assert narrow2("foobar") == 6 + [case testI64ErrorValuesAndUndefined] from typing import Any, Tuple import sys diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test index 24bf0d99b145..1e945d0af27d 100644 --- a/test-data/unit/check-native-int.test +++ b/test-data/unit/check-native-int.test @@ -184,3 +184,47 @@ from mypy_extensions import i64, i32 reveal_type([a for a in range(i64(5))]) # N: Revealed type is "builtins.list[mypy_extensions.i64]" [reveal_type(a) for a in range(0, i32(5))] # N: Revealed type is "mypy_extensions.i32" [builtins fixtures/primitives.pyi] + +[case testNativeIntNarrowing] +from typing import Union +from mypy_extensions import i64, i32 + +def narrow_i64(x: Union[str, i64]) -> None: + if isinstance(x, i64): + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + + if isinstance(x, str): + reveal_type(x) # N: Revealed type is "builtins.str" + else: + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + +def narrow_i32(x: Union[str, i32]) -> None: + if isinstance(x, i32): + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + + if isinstance(x, str): + reveal_type(x) # N: Revealed type is "builtins.str" + else: + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + +[builtins fixtures/primitives.pyi] From 81efd6eaa0f8300ec4bd29d6797b9e4476a06d7d Mon Sep 17 00:00:00 2001 From: JoaquimEsteves Date: Fri, 27 Jan 2023 14:12:24 +0100 Subject: [PATCH 212/292] =?UTF-8?q?=E2=9C=A8=20Added=20new=20Error=20to=20?= =?UTF-8?q?TypedDict=20(#14225)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #4617 This allows the following code to trigger the error `typeddict-unknown-key` ```python A = T.TypedDict("A", {"x": int}) def f(x: A) -> None: ... f({"x": 1, "y": "foo"}) # err: typeddict-unknown-key f({"y": "foo"}) # err: typeddict-unknown-key & typeddict-item f({"x": 'err', "y": "foo"}) # err: typeddict-unknown-key & typeddict-item a: A = { 'x': 1 } # You can set extra attributes a['extra'] = 'extra' # err: typeddict-unknown-key # Reading them produces the normal item error err = a['does not exist'] # err: typeddict-item ``` The user can then safely ignore this specific error at their disgression. 
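For example, the new code can be suppressed on a single line without losing the stricter `typeddict-item` checks elsewhere (a small sketch reusing the `A` TypedDict from the snippet above):

```python
a: A = {"x": 1}
a["extra"] = "extra"  # type: ignore[typeddict-unknown-key]
```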
Co-authored-by: Ivan Levkivskyi --- docs/source/error_code_list.rst | 50 ++++++++++++++++++++++++++++ mypy/checkexpr.py | 18 ++++++---- mypy/checkmember.py | 4 ++- mypy/errorcodes.py | 3 ++ mypy/messages.py | 48 +++++++++++++------------- test-data/unit/check-errorcodes.test | 11 ++++-- test-data/unit/check-typeddict.test | 3 +- 7 files changed, 103 insertions(+), 34 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 1a39bf8feb6c..674ad08c4d09 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -430,6 +430,56 @@ Example: # Error: Incompatible types (expression has type "float", # TypedDict item "x" has type "int") [typeddict-item] p: Point = {'x': 1.2, 'y': 4} + +Check TypedDict Keys [typeddict-unknown-key] +-------------------------------------------- + +When constructing a ``TypedDict`` object, mypy checks whether the definition +contains unknown keys. For convenience's sake, mypy will not generate an error +when a ``TypedDict`` has extra keys if it's passed to a function as an argument. +However, it will generate an error when these are created. Example: + +.. code-block:: python + + from typing_extensions import TypedDict + + class Point(TypedDict): + x: int + y: int + + class Point3D(Point): + z: int + + def add_x_coordinates(a: Point, b: Point) -> int: + return a["x"] + b["x"] + + a: Point = {"x": 1, "y": 4} + b: Point3D = {"x": 2, "y": 5, "z": 6} + + # OK + add_x_coordinates(a, b) + # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] + add_x_coordinates(a, {"x": 1, "y": 4, "z": 5}) + + +Setting an unknown value on a ``TypedDict`` will also generate this error: + +.. code-block:: python + + a: Point = {"x": 1, "y": 2} + # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] + a["z"] = 3 + + +Whereas reading an unknown value will generate the more generic/serious +``typeddict-item``: + +.. 
code-block:: python + + a: Point = {"x": 1, "y": 2} + # Error: TypedDict "Point" has no key "z" [typeddict-item] + _ = a["z"] + Check that type of target is known [has-type] --------------------------------------------- diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index e19d48f4f5e7..43d3242ce1a1 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -790,17 +790,21 @@ def check_typeddict_call_with_kwargs( context: Context, orig_callee: Type | None, ) -> Type: - if not (callee.required_keys <= set(kwargs.keys()) <= set(callee.items.keys())): + actual_keys = kwargs.keys() + if not (callee.required_keys <= actual_keys <= callee.items.keys()): expected_keys = [ key for key in callee.items.keys() - if key in callee.required_keys or key in kwargs.keys() + if key in callee.required_keys or key in actual_keys ] - actual_keys = kwargs.keys() self.msg.unexpected_typeddict_keys( callee, expected_keys=expected_keys, actual_keys=list(actual_keys), context=context ) - return AnyType(TypeOfAny.from_error) + if callee.required_keys > actual_keys: + # found_set is a sub-set of the required_keys + # This means we're missing some keys and as such, we can't + # properly type the object + return AnyType(TypeOfAny.from_error) orig_callee = get_proper_type(orig_callee) if isinstance(orig_callee, CallableType): @@ -3777,7 +3781,9 @@ def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) return self.chk.named_generic_type("builtins.tuple", [union]) return union - def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) -> Type: + def visit_typeddict_index_expr( + self, td_type: TypedDictType, index: Expression, setitem: bool = False + ) -> Type: if isinstance(index, StrExpr): key_names = [index.value] else: @@ -3806,7 +3812,7 @@ def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) for key_name in key_names: value_type = td_type.items.get(key_name) if value_type is None: - self.msg.typeddict_key_not_found(td_type, key_name, index) + self.msg.typeddict_key_not_found(td_type, key_name, index, setitem) return AnyType(TypeOfAny.from_error) else: value_types.append(value_type) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index f90a4f706a87..a2c580e13446 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -1073,7 +1073,9 @@ def analyze_typeddict_access( if isinstance(mx.context, IndexExpr): # Since we can get this during `a['key'] = ...` # it is safe to assume that the context is `IndexExpr`. - item_type = mx.chk.expr_checker.visit_typeddict_index_expr(typ, mx.context.index) + item_type = mx.chk.expr_checker.visit_typeddict_index_expr( + typ, mx.context.index, setitem=True + ) else: # It can also be `a.__setitem__(...)` direct call. 
# In this case `item_type` can be `Any`, diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 5696763ec9d1..ab49e70eaf20 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -67,6 +67,9 @@ def __str__(self) -> str: TYPEDDICT_ITEM: Final = ErrorCode( "typeddict-item", "Check items when constructing TypedDict", "General" ) +TYPPEDICT_UNKNOWN_KEY: Final = ErrorCode( + "typeddict-unknown-key", "Check unknown keys when constructing TypedDict", "General" +) HAS_TYPE: Final = ErrorCode( "has-type", "Check that type of reference can be determined", "General" ) diff --git a/mypy/messages.py b/mypy/messages.py index 94a97f696b6c..750dcdd42398 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1637,9 +1637,9 @@ def unexpected_typeddict_keys( expected_set = set(expected_keys) if not typ.is_anonymous(): # Generate simpler messages for some common special cases. - if actual_set < expected_set: - # Use list comprehension instead of set operations to preserve order. - missing = [key for key in expected_keys if key not in actual_set] + # Use list comprehension instead of set operations to preserve order. + missing = [key for key in expected_keys if key not in actual_set] + if missing: self.fail( "Missing {} for TypedDict {}".format( format_key_list(missing, short=True), format_type(typ) @@ -1647,20 +1647,18 @@ def unexpected_typeddict_keys( context, code=codes.TYPEDDICT_ITEM, ) + extra = [key for key in actual_keys if key not in expected_set] + if extra: + self.fail( + "Extra {} for TypedDict {}".format( + format_key_list(extra, short=True), format_type(typ) + ), + context, + code=codes.TYPPEDICT_UNKNOWN_KEY, + ) + if missing or extra: + # No need to check for further errors return - else: - extra = [key for key in actual_keys if key not in expected_set] - if extra: - # If there are both extra and missing keys, only report extra ones for - # simplicity. - self.fail( - "Extra {} for TypedDict {}".format( - format_key_list(extra, short=True), format_type(typ) - ), - context, - code=codes.TYPEDDICT_ITEM, - ) - return found = format_key_list(actual_keys, short=True) if not expected_keys: self.fail(f"Unexpected TypedDict {found}", context) @@ -1680,8 +1678,15 @@ def typeddict_key_must_be_string_literal(self, typ: TypedDictType, context: Cont ) def typeddict_key_not_found( - self, typ: TypedDictType, item_name: str, context: Context + self, typ: TypedDictType, item_name: str, context: Context, setitem: bool = False ) -> None: + """Handle error messages for TypedDicts that have unknown keys. + + Note, that we differentiate in between reading a value and setting a + value. + Setting a value on a TypedDict is an 'unknown-key' error, whereas + reading it is the more serious/general 'item' error. 
+ """ if typ.is_anonymous(): self.fail( '"{}" is not a valid TypedDict key; expected one of {}'.format( @@ -1690,17 +1695,14 @@ def typeddict_key_not_found( context, ) else: + err_code = codes.TYPPEDICT_UNKNOWN_KEY if setitem else codes.TYPEDDICT_ITEM self.fail( - f'TypedDict {format_type(typ)} has no key "{item_name}"', - context, - code=codes.TYPEDDICT_ITEM, + f'TypedDict {format_type(typ)} has no key "{item_name}"', context, code=err_code ) matches = best_matches(item_name, typ.items.keys(), n=3) if matches: self.note( - "Did you mean {}?".format(pretty_seq(matches, "or")), - context, - code=codes.TYPEDDICT_ITEM, + "Did you mean {}?".format(pretty_seq(matches, "or")), context, code=err_code ) def typeddict_context_ambiguous(self, types: list[TypedDictType], context: Context) -> None: diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 19ce56057ff5..8c6a446d101e 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -455,11 +455,15 @@ class E(TypedDict): y: int a: D = {'x': ''} # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [typeddict-item] -b: D = {'y': ''} # E: Extra key "y" for TypedDict "D" [typeddict-item] +b: D = {'y': ''} # E: Missing key "x" for TypedDict "D" [typeddict-item] \ + # E: Extra key "y" for TypedDict "D" [typeddict-unknown-key] c = D(x=0) if int() else E(x=0, y=0) c = {} # E: Expected TypedDict key "x" but found no keys [typeddict-item] +d: D = {'x': '', 'y': 1} # E: Extra key "y" for TypedDict "D" [typeddict-unknown-key] \ + # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [typeddict-item] -a['y'] = 1 # E: TypedDict "D" has no key "y" [typeddict-item] + +a['y'] = 1 # E: TypedDict "D" has no key "y" [typeddict-unknown-key] a['x'] = 'x' # E: Value of "x" has incompatible type "str"; expected "int" [typeddict-item] a['y'] # E: TypedDict "D" has no key "y" [typeddict-item] [builtins fixtures/dict.pyi] @@ -472,7 +476,8 @@ class A(TypedDict): two_commonparts: int a: A = {'one_commonpart': 1, 'two_commonparts': 2} -a['other_commonpart'] = 3 # type: ignore[typeddict-item] +a['other_commonpart'] = 3 # type: ignore[typeddict-unknown-key] +not_exist = a['not_exist'] # type: ignore[typeddict-item] [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 70ff6a4a6759..1f200d168a55 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -2030,7 +2030,8 @@ v = {union: 2} # E: Expected TypedDict key to be string literal num2: Literal['num'] v = {num2: 2} bad2: Literal['bad'] -v = {bad2: 2} # E: Extra key "bad" for TypedDict "Value" +v = {bad2: 2} # E: Missing key "num" for TypedDict "Value" \ + # E: Extra key "bad" for TypedDict "Value" [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] From 91e858199147e69cbc6f2f658ded66ebd31086f6 Mon Sep 17 00:00:00 2001 From: Yurii Karabas <1998uriyyo@gmail.com> Date: Sun, 29 Jan 2023 20:08:10 +0200 Subject: [PATCH 213/292] Fix internal crash when resolve same partial type twice (#14552) Fixes: #14548 Fixed case when untyped list item type resolving can lead to an internal crash. 
Code to reproduce this issue: ```py arr = [] arr.append(arr.append(1)) ``` Basically, the issue is that after the first resolving of `arr.append` method, `var` is deleted from `partial_types`, and as war as `arr.append` is a nested call we try to delete the same `var` that was already deleted. --- mypy/checkexpr.py | 3 ++- test-data/unit/check-inference.test | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 43d3242ce1a1..d918eb9b5467 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -899,7 +899,8 @@ def try_infer_partial_type(self, e: CallExpr) -> None: return var, partial_types = ret typ = self.try_infer_partial_value_type_from_call(e, callee.name, var) - if typ is not None: + # Var may be deleted from partial_types in try_infer_partial_value_type_from_call + if typ is not None and var in partial_types: var.type = typ del partial_types[var] elif isinstance(callee.expr, IndexExpr) and isinstance(callee.expr.base, RefExpr): diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 331b110fded6..fc8113766f1a 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1951,6 +1951,13 @@ class A: [out] main:4: error: "None" has no attribute "__iter__" (not iterable) +[case testPartialTypeErrorSpecialCase4] +# This used to crash. +arr = [] +arr.append(arr.append(1)) +[builtins fixtures/list.pyi] +[out] +main:3: error: "append" of "list" does not return a value -- Multipass -- --------- From 6413aacb7ad206b7b1152ddd6aa8f3f29bef8174 Mon Sep 17 00:00:00 2001 From: Ilya Konstantinov Date: Sun, 29 Jan 2023 18:13:47 -0500 Subject: [PATCH 214/292] Remove Python 2 builtins (#14555) Follow up to #14083. --- mypy/test/data.py | 5 ++--- test-data/unit/lib-stub/__builtin__.pyi | 29 ------------------------- 2 files changed, 2 insertions(+), 32 deletions(-) delete mode 100644 test-data/unit/lib-stub/__builtin__.pyi diff --git a/mypy/test/data.py b/mypy/test/data.py index f4cb39818b4e..c6f671b2d401 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -81,13 +81,12 @@ def parse_test_case(case: DataDrivenTestCase) -> None: output_files.append((file_entry[0], re.compile(file_entry[1].rstrip(), re.S))) else: output_files.append(file_entry) - elif item.id in ("builtins", "builtins_py2"): + elif item.id == "builtins": # Use an alternative stub file for the builtins module. assert item.arg is not None mpath = join(os.path.dirname(case.file), item.arg) - fnam = "builtins.pyi" if item.id == "builtins" else "__builtin__.pyi" with open(mpath, encoding="utf8") as f: - files.append((join(base_path, fnam), f.read())) + files.append((join(base_path, "builtins.pyi"), f.read())) elif item.id == "typing": # Use an alternative stub file for the typing module. assert item.arg is not None diff --git a/test-data/unit/lib-stub/__builtin__.pyi b/test-data/unit/lib-stub/__builtin__.pyi deleted file mode 100644 index f9ee7b74011d..000000000000 --- a/test-data/unit/lib-stub/__builtin__.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Generic, TypeVar -_T = TypeVar('_T') - -Any = 0 - -class object: - def __init__(self): - # type: () -> None - pass - -class type: - def __init__(self, x): - # type: (Any) -> None - pass - -# These are provided here for convenience. 
-class int: pass -class float: pass - -class str: pass - -class tuple(Generic[_T]): pass -class function: pass - -class ellipsis: pass - -def print(*args, end=''): pass - -# Definition of None is implicit From c4ecd2bf9799de418f0782b108ed2c2eb0d4820f Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 29 Jan 2023 23:31:46 +0000 Subject: [PATCH 215/292] Fix a crash on walrus in comprehension at class scope (#14556) Fixes #14201 The fix is trivial, turn an assert condition into a blocker error (with message matching Python syntax error). I also add a test case for a crash from the same issue that looks already fixed. --- mypy/semanal.py | 25 +++++++++++++++++++++++++ test-data/unit/check-python38.test | 5 +++++ test-data/unit/check-statements.test | 4 ++++ 3 files changed, 34 insertions(+) diff --git a/mypy/semanal.py b/mypy/semanal.py index 15566c9396c6..382d3e650995 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2658,8 +2658,33 @@ def visit_import_all(self, i: ImportAll) -> None: def visit_assignment_expr(self, s: AssignmentExpr) -> None: s.value.accept(self) + if self.is_func_scope(): + if not self.check_valid_comprehension(s): + return self.analyze_lvalue(s.target, escape_comprehensions=True, has_explicit_value=True) + def check_valid_comprehension(self, s: AssignmentExpr) -> bool: + """Check that assignment expression is not nested within comprehension at class scope. + + class C: + [(j := i) for i in [1, 2, 3]] + is a syntax error that is not enforced by Python parser, but at later steps. + """ + for i, is_comprehension in enumerate(reversed(self.is_comprehension_stack)): + if not is_comprehension and i < len(self.locals) - 1: + if self.locals[-1 - i] is None: + self.fail( + "Assignment expression within a comprehension" + " cannot be used in a class body", + s, + code=codes.SYNTAX, + serious=True, + blocker=True, + ) + return False + break + return True + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: self.statement = s diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index c8fb1eb5aac8..7e5e0f3cf185 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -734,3 +734,8 @@ class C(Generic[T]): [out] main:10: note: Revealed type is "builtins.int" main:10: note: Revealed type is "builtins.str" + +[case testNoCrashOnAssignmentExprClass] +class C: + [(j := i) for i in [1, 2, 3]] # E: Assignment expression within a comprehension cannot be used in a class body +[builtins fixtures/list.pyi] diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index d1a2469efa56..ed7349aaa296 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -2194,3 +2194,7 @@ class B: pass def foo(x: int) -> Union[Generator[A, None, None], Generator[B, None, None]]: yield x # E: Incompatible types in "yield" (actual type "int", expected type "Union[A, B]") + +[case testNoCrashOnStarRightHandSide] +x = *(1, 2, 3) # E: Can use starred expression only as assignment target +[builtins fixtures/tuple.pyi] From 8af3af311cc75bc8a846e8f946143ddc7c67f619 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 29 Jan 2023 23:32:10 +0000 Subject: [PATCH 216/292] Support protocol inference for Type[T] via metaclass (#14554) Fixes #12553 This looks quite niche, but also it was mentioned recently couple times for a real-life use case: enum classes, and implementation looks simple. 
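A sketch of the enum use case, adapted from the new `testEnumIterMetaInference` test (the names below are illustrative):

```python
from enum import Enum
from typing import Iterable, Iterator, Type, TypeVar

_E = TypeVar("_E", bound=Enum)

def enum_iter(cls: Type[_E]) -> Iterable[_E]:
    # Type[_E] now matches Iterable[_E] via the metaclass (EnumMeta.__iter__),
    # so iter(cls) is inferred as Iterator[_E] instead of failing.
    return iter(cls)

class Color(Enum):
    RED = 1
    BLUE = 2

first: Color = next(iter(enum_iter(Color)))  # _E is inferred as Color
```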
--- mypy/constraints.py | 11 +++++++++++ test-data/unit/check-protocols.test | 21 +++++++++++++++++++++ test-data/unit/pythoneval.test | 19 +++++++++++++++++++ 3 files changed, 51 insertions(+) diff --git a/mypy/constraints.py b/mypy/constraints.py index 697e793cb11d..a8f04094ca63 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -619,6 +619,17 @@ def visit_instance(self, template: Instance) -> list[Constraint]: actual.item, template, subtype, template, class_obj=True ) ) + if self.direction == SUPERTYPE_OF: + # Infer constraints for Type[T] via metaclass of T when it makes sense. + a_item = actual.item + if isinstance(a_item, TypeVarType): + a_item = get_proper_type(a_item.upper_bound) + if isinstance(a_item, Instance) and a_item.type.metaclass_type: + res.extend( + self.infer_constraints_from_protocol_members( + a_item.type.metaclass_type, template, actual, template + ) + ) if isinstance(actual, Overloaded) and actual.fallback is not None: actual = actual.fallback diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index e490457ff25c..96b3a484f56a 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -3977,3 +3977,24 @@ class C: DEFAULT: ClassVar[C] x: P = C() + +[case testInferenceViaTypeTypeMetaclass] +from typing import Iterator, Iterable, TypeVar, Type + +M = TypeVar("M") + +class Meta(type): + def __iter__(self: Type[M]) -> Iterator[M]: ... +class Foo(metaclass=Meta): ... + +T = TypeVar("T") +def test(x: Iterable[T]) -> T: ... + +reveal_type(test(Foo)) # N: Revealed type is "__main__.Foo" +t_foo: Type[Foo] +reveal_type(test(t_foo)) # N: Revealed type is "__main__.Foo" + +TF = TypeVar("TF", bound=Foo) +def outer(cls: Type[TF]) -> TF: + reveal_type(test(cls)) # N: Revealed type is "TF`-1" + return cls() diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 3520b5874018..b414eba9f679 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1858,3 +1858,22 @@ _testTupleWithDifferentArgsPy310.py:20: note: Revealed type is "builtins.list[Tu _testTupleWithDifferentArgsPy310.py:26: error: Invalid type: try using Literal[1] instead? _testTupleWithDifferentArgsPy310.py:27: error: Unexpected "..." _testTupleWithDifferentArgsPy310.py:30: note: Revealed type is "builtins.tuple[builtins.object, ...]" + +[case testEnumIterMetaInference] +import socket +from enum import Enum +from typing import Iterable, Iterator, Type, TypeVar + +_E = TypeVar("_E", bound=Enum) + +def enum_iter(cls: Type[_E]) -> Iterable[_E]: + reveal_type(iter(cls)) + reveal_type(next(iter(cls))) + return iter(cls) + +for value in enum_iter(socket.SocketKind): + reveal_type(value) +[out] +_testEnumIterMetaInference.py:8: note: Revealed type is "typing.Iterator[_E`-1]" +_testEnumIterMetaInference.py:9: note: Revealed type is "_E`-1" +_testEnumIterMetaInference.py:13: note: Revealed type is "socket.SocketKind" From 8e9f89a8498a1242cf1a163f6dbdbd15da54c9a0 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 30 Jan 2023 09:59:42 +0100 Subject: [PATCH 217/292] Fix `AttrsInstance` protocol check with cache (#14551) Use correct fullname for `__attrs_attrs__` ClassVar to fix issue with warm cache. 
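The user-facing pattern this affects looks roughly like the new fine-grained test (names taken from that test):

```python
from typing import Any, ClassVar, Protocol
import attr

class AttrsInstance(Protocol):
    __attrs_attrs__: ClassVar[Any]

@attr.s
class Entry:
    var: int = attr.ib()

def func(e: AttrsInstance) -> None: ...
func(Entry(2))  # must keep matching the protocol on a warm-cache run as well
```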
Closes #14099 --- mypy/plugins/attrs.py | 2 +- test-data/unit/fine-grained-attr.test | 34 +++++++++++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 16e8891e5f57..50d2955d2584 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -828,7 +828,7 @@ def _add_attrs_magic_attribute( ctx.cls, MAGIC_ATTR_NAME, TupleType(attributes_types, fallback=attributes_type), - fullname=f"{ctx.cls.fullname}.{attr_name}", + fullname=f"{ctx.cls.fullname}.{MAGIC_ATTR_NAME}", override_allow_incompatible=True, is_classvar=True, ) diff --git a/test-data/unit/fine-grained-attr.test b/test-data/unit/fine-grained-attr.test index fd7c97da0662..3fd40b774c7b 100644 --- a/test-data/unit/fine-grained-attr.test +++ b/test-data/unit/fine-grained-attr.test @@ -46,3 +46,37 @@ A.__attrs_attrs__.b [out] == + +[case magicAttributeConsistency2-only_when_cache] +[file c.py] +import attr + +@attr.s +class Entry: + var: int = attr.ib() +[builtins fixtures/attr.pyi] + +[file m.py] +from typing import Any, ClassVar, Protocol +from c import Entry + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +def func(e: AttrsInstance) -> None: ... +func(Entry(2)) + +[file m.py.2] +from typing import Any, ClassVar, Protocol +from c import Entry + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +def func(e: AttrsInstance) -> int: + return 2 # Change return type to force reanalysis + +func(Entry(2)) + +[out] +== From f31d162415c97096cce62827bc89db0c751d6bfb Mon Sep 17 00:00:00 2001 From: Ilya Konstantinov Date: Mon, 30 Jan 2023 04:20:36 -0500 Subject: [PATCH 218/292] Fail-fast on missing builtins (#14550) As discussed in #14547, some mypy features were degrading rather than failing-fast when certain built-in types (list, dict) were not present in the test environment. - The degraded state (e.g. lack of `__annotations__`) didn't make the culprit (sparse fixture) obvious, making tests harder to debug. - Having the code work around quirks of the testing environment ("sparse fixtures") is an anti-pattern. 
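For anyone writing test fixtures, the practical consequence is that the core containers must be spelled out; a minimal fixture stub now looks roughly like this (illustrative only, the real fixtures under test-data/unit/fixtures/ define more):

    class object: pass
    class type: pass
    class function: pass
    class int: pass
    class str: pass
    class list: pass
    class dict: pass

The semanal.py hunk below now defers (and asserts on the final pass) instead of quietly skipping __path__ / __annotations__ when builtins.list or builtins.dict cannot be found.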
--- mypy/messages.py | 2 - mypy/semanal.py | 28 +++--- mypy/semanal_namedtuple.py | 8 +- test-data/unit/check-dynamic-typing.test | 2 + test-data/unit/check-generics.test | 1 + test-data/unit/check-incomplete-fixture.test | 8 -- test-data/unit/check-tuples.test | 1 + test-data/unit/cmdline.test | 2 + test-data/unit/fine-grained.test | 4 +- test-data/unit/fixtures/__init_subclass__.pyi | 1 + test-data/unit/fixtures/__new__.pyi | 1 + test-data/unit/fixtures/alias.pyi | 2 + test-data/unit/fixtures/any.pyi | 2 + test-data/unit/fixtures/attr.pyi | 2 + test-data/unit/fixtures/bool.pyi | 1 + test-data/unit/fixtures/callable.pyi | 1 + test-data/unit/fixtures/classmethod.pyi | 3 + test-data/unit/fixtures/complex.pyi | 1 + test-data/unit/fixtures/complex_tuple.pyi | 1 + test-data/unit/fixtures/divmod.pyi | 2 + test-data/unit/fixtures/exception.pyi | 2 + test-data/unit/fixtures/f_string.pyi | 2 + test-data/unit/fixtures/fine_grained.pyi | 1 + test-data/unit/fixtures/float.pyi | 2 + test-data/unit/fixtures/for.pyi | 1 + test-data/unit/fixtures/function.pyi | 1 + test-data/unit/fixtures/isinstance.pyi | 2 + .../unit/fixtures/isinstance_python3_10.pyi | 2 + test-data/unit/fixtures/list.pyi | 2 + test-data/unit/fixtures/module_all.pyi | 1 + test-data/unit/fixtures/notimplemented.pyi | 1 + test-data/unit/fixtures/object_hashable.pyi | 1 + test-data/unit/fixtures/ops.pyi | 2 + test-data/unit/fixtures/property.pyi | 1 + test-data/unit/fixtures/set.pyi | 2 + test-data/unit/fixtures/slice.pyi | 1 + test-data/unit/fixtures/staticmethod.pyi | 1 + test-data/unit/fixtures/transform.pyi | 2 + test-data/unit/fixtures/tuple-simple.pyi | 1 + test-data/unit/fixtures/tuple.pyi | 2 + test-data/unit/fixtures/union.pyi | 1 + test-data/unit/lib-stub/builtins.pyi | 9 +- test-data/unit/merge.test | 96 +++++++++---------- test-data/unit/typexport-basic.test | 4 + 44 files changed, 131 insertions(+), 82 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 750dcdd42398..b529615e564e 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -122,8 +122,6 @@ # test-data/unit/fixtures/) that provides the definition. This is used for # generating better error messages when running mypy tests only. 
SUGGESTED_TEST_FIXTURES: Final = { - "builtins.list": "list.pyi", - "builtins.dict": "dict.pyi", "builtins.set": "set.pyi", "builtins.tuple": "tuple.pyi", "builtins.bool": "bool.pyi", diff --git a/mypy/semanal.py b/mypy/semanal.py index 382d3e650995..79302b4d08e1 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -625,23 +625,23 @@ def add_implicit_module_attrs(self, file_node: MypyFile) -> None: continue # Need to construct the type ourselves, to avoid issues with __builtins__.list # not being subscriptable or typing.List not getting bound - sym = self.lookup_qualified("__builtins__.list", Context()) - if not sym: - continue - node = sym.node - if not isinstance(node, TypeInfo): - self.defer(node) + inst = self.named_type_or_none("builtins.list", [str_type]) + if inst is None: + assert not self.final_iteration, "Cannot find builtins.list to add __path__" + self.defer() return - typ = Instance(node, [str_type]) + typ = inst elif name == "__annotations__": - sym = self.lookup_qualified("__builtins__.dict", Context(), suppress_errors=True) - if not sym: - continue - node = sym.node - if not isinstance(node, TypeInfo): - self.defer(node) + inst = self.named_type_or_none( + "builtins.dict", [str_type, AnyType(TypeOfAny.special_form)] + ) + if inst is None: + assert ( + not self.final_iteration + ), "Cannot find builtins.dict to add __annotations__" + self.defer() return - typ = Instance(node, [str_type, AnyType(TypeOfAny.special_form)]) + typ = inst else: assert t is not None, f"type should be specified for {name}" typ = UnboundType(t) diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 226c2e50326b..1194557836b1 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -481,13 +481,9 @@ def build_namedtuple_typeinfo( strtype = self.api.named_type("builtins.str") implicit_any = AnyType(TypeOfAny.special_form) basetuple_type = self.api.named_type("builtins.tuple", [implicit_any]) - dictype = self.api.named_type_or_none( - "builtins.dict", [strtype, implicit_any] - ) or self.api.named_type("builtins.object") + dictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) # Actual signature should return OrderedDict[str, Union[types]] - ordereddictype = self.api.named_type_or_none( - "builtins.dict", [strtype, implicit_any] - ) or self.api.named_type("builtins.object") + ordereddictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) fallback = self.api.named_type("builtins.tuple", [implicit_any]) # Note: actual signature should accept an invariant version of Iterable[UnionType[types]]. # but it can't be expressed. 'new' and 'len' should be callable types. 
diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test index 7e62c0d0b0e8..dd4cc1579639 100644 --- a/test-data/unit/check-dynamic-typing.test +++ b/test-data/unit/check-dynamic-typing.test @@ -147,6 +147,7 @@ class int: pass class type: pass class function: pass class str: pass +class dict: pass [case testBinaryOperationsWithDynamicAsRightOperand] from typing import Any @@ -219,6 +220,7 @@ class int: pass class type: pass class function: pass class str: pass +class dict: pass [case testDynamicWithUnaryExpressions] from typing import Any diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 1be3145b3b10..a62028ca94ea 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -1331,6 +1331,7 @@ class type: pass class tuple: pass class function: pass class str: pass +class dict: pass [case testMultipleAssignmentWithIterable] from typing import Iterable, TypeVar diff --git a/test-data/unit/check-incomplete-fixture.test b/test-data/unit/check-incomplete-fixture.test index f06dad293184..146494df1bd6 100644 --- a/test-data/unit/check-incomplete-fixture.test +++ b/test-data/unit/check-incomplete-fixture.test @@ -12,14 +12,6 @@ import m m.x # E: "object" has no attribute "x" [file m.py] -[case testDictMissingFromStubs] -from typing import Dict -def f(x: Dict[int]) -> None: pass -[out] -main:1: error: Module "typing" has no attribute "Dict" -main:1: note: Maybe your test fixture does not define "builtins.dict"? -main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description - [case testSetMissingFromStubs] from typing import Set def f(x: Set[int]) -> None: pass diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 535a8ae5007e..266bfbf97888 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -774,6 +774,7 @@ class str: pass class bool: pass class type: pass class function: pass +class dict: pass -- For loop over tuple diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 9eba9ea1e906..c2e98cdb74f9 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1516,6 +1516,8 @@ a.py:2: note: By default the bodies of untyped functions are not checked, consid class object: pass class str(object): pass class int(object): pass +class list: pass +class dict: pass [file dir/stdlib/sys.pyi] [file dir/stdlib/types.pyi] [file dir/stdlib/typing.pyi] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index d47c21283c91..9f22dc9ab7ac 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -1809,8 +1809,8 @@ def f() -> Iterator[None]: [typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [triggered] -2: , __main__ -3: , __main__, a +2: , , __main__ +3: , , __main__, a [out] main:2: note: Revealed type is "contextlib.GeneratorContextManager[None]" == diff --git a/test-data/unit/fixtures/__init_subclass__.pyi b/test-data/unit/fixtures/__init_subclass__.pyi index c5a17f60688e..b4618c28249e 100644 --- a/test-data/unit/fixtures/__init_subclass__.pyi +++ b/test-data/unit/fixtures/__init_subclass__.pyi @@ -11,3 +11,4 @@ class int: pass class bool: pass class str: pass class function: pass +class dict: pass diff --git a/test-data/unit/fixtures/__new__.pyi b/test-data/unit/fixtures/__new__.pyi index bb4788df8fe9..401de6fb9cd1 100644 --- a/test-data/unit/fixtures/__new__.pyi +++ 
b/test-data/unit/fixtures/__new__.pyi @@ -16,3 +16,4 @@ class int: pass class bool: pass class str: pass class function: pass +class dict: pass diff --git a/test-data/unit/fixtures/alias.pyi b/test-data/unit/fixtures/alias.pyi index 08b145f4efd1..2ec7703f00c4 100644 --- a/test-data/unit/fixtures/alias.pyi +++ b/test-data/unit/fixtures/alias.pyi @@ -12,3 +12,5 @@ class str: pass class function: pass bytes = str + +class dict: pass diff --git a/test-data/unit/fixtures/any.pyi b/test-data/unit/fixtures/any.pyi index d6d90b7b3e98..b1f8d83bf524 100644 --- a/test-data/unit/fixtures/any.pyi +++ b/test-data/unit/fixtures/any.pyi @@ -6,3 +6,5 @@ class int: pass class str: pass def any(i: Iterable[T]) -> bool: pass + +class dict: pass diff --git a/test-data/unit/fixtures/attr.pyi b/test-data/unit/fixtures/attr.pyi index 3ac535c21108..3bd4f0ec7cbe 100644 --- a/test-data/unit/fixtures/attr.pyi +++ b/test-data/unit/fixtures/attr.pyi @@ -25,3 +25,5 @@ class complex: class str: pass class ellipsis: pass class tuple: pass +class list: pass +class dict: pass diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi index 0f6e1a174c7b..bc58a22b952b 100644 --- a/test-data/unit/fixtures/bool.pyi +++ b/test-data/unit/fixtures/bool.pyi @@ -17,3 +17,4 @@ class str: pass class ellipsis: pass class list(Generic[T]): pass class property: pass +class dict: pass diff --git a/test-data/unit/fixtures/callable.pyi b/test-data/unit/fixtures/callable.pyi index 4ad72bee93ec..44abf0691ceb 100644 --- a/test-data/unit/fixtures/callable.pyi +++ b/test-data/unit/fixtures/callable.pyi @@ -28,3 +28,4 @@ class str: def __eq__(self, other: 'str') -> bool: pass class ellipsis: pass class list: ... +class dict: pass diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi index 03ad803890a3..97e018b1dc1c 100644 --- a/test-data/unit/fixtures/classmethod.pyi +++ b/test-data/unit/fixtures/classmethod.pyi @@ -26,3 +26,6 @@ class bool: pass class ellipsis: pass class tuple(typing.Generic[_T]): pass + +class list: pass +class dict: pass diff --git a/test-data/unit/fixtures/complex.pyi b/test-data/unit/fixtures/complex.pyi index bcd03a2562e5..880ec3dd4d9d 100644 --- a/test-data/unit/fixtures/complex.pyi +++ b/test-data/unit/fixtures/complex.pyi @@ -10,3 +10,4 @@ class int: pass class float: pass class complex: pass class str: pass +class dict: pass diff --git a/test-data/unit/fixtures/complex_tuple.pyi b/test-data/unit/fixtures/complex_tuple.pyi index 6be46ac34573..81f1d33d1207 100644 --- a/test-data/unit/fixtures/complex_tuple.pyi +++ b/test-data/unit/fixtures/complex_tuple.pyi @@ -13,3 +13,4 @@ class float: pass class complex: pass class str: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/divmod.pyi b/test-data/unit/fixtures/divmod.pyi index cf41c500f49b..4d81d8fb47a2 100644 --- a/test-data/unit/fixtures/divmod.pyi +++ b/test-data/unit/fixtures/divmod.pyi @@ -19,3 +19,5 @@ class ellipsis: pass _N = TypeVar('_N', int, float) def divmod(_x: _N, _y: _N) -> Tuple[_N, _N]: ... + +class dict: pass diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi index 70e3b19c4149..08496e4e5934 100644 --- a/test-data/unit/fixtures/exception.pyi +++ b/test-data/unit/fixtures/exception.pyi @@ -8,6 +8,8 @@ class object: class type: pass class tuple(Generic[T]): def __ge__(self, other: object) -> bool: ... 
+class list: pass +class dict: pass class function: pass class int: pass class str: pass diff --git a/test-data/unit/fixtures/f_string.pyi b/test-data/unit/fixtures/f_string.pyi index 78d39aee85b8..328c666b7ece 100644 --- a/test-data/unit/fixtures/f_string.pyi +++ b/test-data/unit/fixtures/f_string.pyi @@ -34,3 +34,5 @@ class str: def format(self, *args) -> str: pass def join(self, l: List[str]) -> str: pass + +class dict: pass diff --git a/test-data/unit/fixtures/fine_grained.pyi b/test-data/unit/fixtures/fine_grained.pyi index b2e104ccfceb..e454a27a5ebd 100644 --- a/test-data/unit/fixtures/fine_grained.pyi +++ b/test-data/unit/fixtures/fine_grained.pyi @@ -27,3 +27,4 @@ class tuple(Generic[T]): pass class function: pass class ellipsis: pass class list(Generic[T]): pass +class dict: pass diff --git a/test-data/unit/fixtures/float.pyi b/test-data/unit/fixtures/float.pyi index 880b16a2321b..5db4525849c0 100644 --- a/test-data/unit/fixtures/float.pyi +++ b/test-data/unit/fixtures/float.pyi @@ -34,3 +34,5 @@ class float: def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... + +class dict: pass diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi index 31f6de78d486..694f83e940b2 100644 --- a/test-data/unit/fixtures/for.pyi +++ b/test-data/unit/fixtures/for.pyi @@ -18,3 +18,4 @@ class str: pass # for convenience class list(Iterable[t], Generic[t]): def __iter__(self) -> Iterator[t]: pass +class dict: pass diff --git a/test-data/unit/fixtures/function.pyi b/test-data/unit/fixtures/function.pyi index c00a7846628a..697d0d919d98 100644 --- a/test-data/unit/fixtures/function.pyi +++ b/test-data/unit/fixtures/function.pyi @@ -5,3 +5,4 @@ class type: pass class function: pass class int: pass class str: pass +class dict: pass diff --git a/test-data/unit/fixtures/isinstance.pyi b/test-data/unit/fixtures/isinstance.pyi index aa8bfce7fbe0..c1125c24b941 100644 --- a/test-data/unit/fixtures/isinstance.pyi +++ b/test-data/unit/fixtures/isinstance.pyi @@ -25,3 +25,5 @@ class str: class ellipsis: pass NotImplemented = cast(Any, None) + +class dict: pass diff --git a/test-data/unit/fixtures/isinstance_python3_10.pyi b/test-data/unit/fixtures/isinstance_python3_10.pyi index abb37ea81c00..7c919a216bfb 100644 --- a/test-data/unit/fixtures/isinstance_python3_10.pyi +++ b/test-data/unit/fixtures/isinstance_python3_10.pyi @@ -27,3 +27,5 @@ class str: class ellipsis: pass NotImplemented = cast(Any, None) + +class dict: pass diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi index 31dc333b3d4f..90fbabe8bc92 100644 --- a/test-data/unit/fixtures/list.pyi +++ b/test-data/unit/fixtures/list.pyi @@ -36,3 +36,5 @@ class str: class bool(int): pass property = object() # Dummy definition. 
+ +class dict: pass diff --git a/test-data/unit/fixtures/module_all.pyi b/test-data/unit/fixtures/module_all.pyi index 87959fefbff5..b14152c7e98f 100644 --- a/test-data/unit/fixtures/module_all.pyi +++ b/test-data/unit/fixtures/module_all.pyi @@ -16,3 +16,4 @@ class list(Generic[_T], Sequence[_T]): def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass class tuple(Generic[_T]): pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/notimplemented.pyi b/test-data/unit/fixtures/notimplemented.pyi index e619a6c5ad85..2ca376ea0760 100644 --- a/test-data/unit/fixtures/notimplemented.pyi +++ b/test-data/unit/fixtures/notimplemented.pyi @@ -11,3 +11,4 @@ class bool: pass class int: pass class str: pass NotImplemented = cast(Any, None) +class dict: pass diff --git a/test-data/unit/fixtures/object_hashable.pyi b/test-data/unit/fixtures/object_hashable.pyi index 592cba808cbf..49b17991f01c 100644 --- a/test-data/unit/fixtures/object_hashable.pyi +++ b/test-data/unit/fixtures/object_hashable.pyi @@ -7,3 +7,4 @@ class float: ... class str: ... class ellipsis: ... class tuple: ... +class dict: pass diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi index 2b29414448cf..9cc4d22eb0a7 100644 --- a/test-data/unit/fixtures/ops.pyi +++ b/test-data/unit/fixtures/ops.pyi @@ -72,3 +72,5 @@ def __print(a1: object = None, a2: object = None, a3: object = None, a4: object = None) -> None: pass class ellipsis: pass + +class dict: pass diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi index 9dca0d50a3be..2397c05c78d5 100644 --- a/test-data/unit/fixtures/property.pyi +++ b/test-data/unit/fixtures/property.pyi @@ -13,6 +13,7 @@ class function: pass property = object() # Dummy definition class classmethod: pass +class list: pass class dict: pass class int: pass class str: pass diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi index d397d4f54af2..71d3bd2eee18 100644 --- a/test-data/unit/fixtures/set.pyi +++ b/test-data/unit/fixtures/set.pyi @@ -25,3 +25,5 @@ class set(Iterable[T], Generic[T]): def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass + +class dict: pass diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi index 947d49ea09fb..b5a4549da068 100644 --- a/test-data/unit/fixtures/slice.pyi +++ b/test-data/unit/fixtures/slice.pyi @@ -14,3 +14,4 @@ class str: pass class slice: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi index 08fbda8ccf8f..8a87121b2a71 100644 --- a/test-data/unit/fixtures/staticmethod.pyi +++ b/test-data/unit/fixtures/staticmethod.pyi @@ -18,3 +18,4 @@ class int: class str: pass class bytes: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/transform.pyi b/test-data/unit/fixtures/transform.pyi index afdc2bf5b59a..7dbb8fa90dbe 100644 --- a/test-data/unit/fixtures/transform.pyi +++ b/test-data/unit/fixtures/transform.pyi @@ -28,3 +28,5 @@ def __print(a1=None, a2=None, a3=None, a4=None): # Do not use *args since this would require list and break many test # cases. 
pass + +class dict: pass diff --git a/test-data/unit/fixtures/tuple-simple.pyi b/test-data/unit/fixtures/tuple-simple.pyi index b195dfa59729..6c816c1c5b7a 100644 --- a/test-data/unit/fixtures/tuple-simple.pyi +++ b/test-data/unit/fixtures/tuple-simple.pyi @@ -18,3 +18,4 @@ class function: pass # We need int for indexing tuples. class int: pass class str: pass # For convenience +class dict: pass diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index 60e47dd02220..0261731304b1 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -51,3 +51,5 @@ def isinstance(x: object, t: type) -> bool: pass def sum(iterable: Iterable[T], start: Optional[T] = None) -> T: pass class BaseException: pass + +class dict: pass diff --git a/test-data/unit/fixtures/union.pyi b/test-data/unit/fixtures/union.pyi index 489e3ddb6ef9..350e145a6f8f 100644 --- a/test-data/unit/fixtures/union.pyi +++ b/test-data/unit/fixtures/union.pyi @@ -15,3 +15,4 @@ class tuple(Generic[T]): pass # We need int for indexing tuples. class int: pass class str: pass # For convenience +class dict: pass diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index 82e0f6135614..c2ac78c41661 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -21,8 +21,13 @@ class function: __name__: str class ellipsis: pass -from typing import Generic, Sequence, TypeVar +from typing import Generic, Iterator, Sequence, TypeVar _T = TypeVar('_T') -class list(Generic[_T], Sequence[_T]): pass +class list(Generic[_T], Sequence[_T]): + def __contains__(self, item: object) -> bool: pass + def __getitem__(self, key: int) -> _T: pass + def __iter__(self) -> Iterator[_T]: pass + +class dict: pass # Definition of None is implicit diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index a593a064cbb2..144a095440f2 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -669,18 +669,18 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __match_args__<10> (Tuple[Literal['x']]) - __new__<11> - _asdict<12> - _field_defaults<13> (builtins.object<1>) - _field_types<14> (builtins.object<1>) - _fields<15> (Tuple[builtins.str<9>]) - _make<16> - _replace<17> - _source<18> (builtins.str<9>) - x<19> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __match_args__<11> (Tuple[Literal['x']]) + __new__<12> + _asdict<13> + _field_defaults<14> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<15> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<16> (Tuple[builtins.str<8>]) + _make<17> + _replace<18> + _source<19> (builtins.str<8>) + x<20> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) @@ -693,19 +693,19 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __match_args__<10> (Tuple[Literal['x'], Literal['y']]) - __new__<11> - _asdict<12> - _field_defaults<13> (builtins.object<1>) - _field_types<14> (builtins.object<1>) - _fields<15> (Tuple[builtins.str<9>, builtins.str<9>]) - _make<16> - _replace<17> - _source<18> (builtins.str<9>) - x<19> (target.A<0>) - y<20> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> 
(builtins.str<8>) + __match_args__<11> (Tuple[Literal['x'], Literal['y']]) + __new__<12> + _asdict<13> + _field_defaults<14> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<15> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<16> (Tuple[builtins.str<8>, builtins.str<8>]) + _make<17> + _replace<18> + _source<19> (builtins.str<8>) + x<20> (target.A<0>) + y<21> (target.A<0>))) [case testNamedTupleOldVersion_typeinfo] import target @@ -730,17 +730,17 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __new__<10> - _asdict<11> - _field_defaults<12> (builtins.object<1>) - _field_types<13> (builtins.object<1>) - _fields<14> (Tuple[builtins.str<9>]) - _make<15> - _replace<16> - _source<17> (builtins.str<9>) - x<18> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __new__<11> + _asdict<12> + _field_defaults<13> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<14> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<15> (Tuple[builtins.str<8>]) + _make<16> + _replace<17> + _source<18> (builtins.str<8>) + x<19> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) @@ -753,18 +753,18 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __new__<10> - _asdict<11> - _field_defaults<12> (builtins.object<1>) - _field_types<13> (builtins.object<1>) - _fields<14> (Tuple[builtins.str<9>, builtins.str<9>]) - _make<15> - _replace<16> - _source<17> (builtins.str<9>) - x<18> (target.A<0>) - y<19> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __new__<11> + _asdict<12> + _field_defaults<13> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<14> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<15> (Tuple[builtins.str<8>, builtins.str<8>]) + _make<16> + _replace<17> + _source<18> (builtins.str<8>) + x<19> (target.A<0>) + y<20> (target.A<0>))) [case testUnionType_types] import target diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index 26caef0d6dde..cd4071eb14ee 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -139,6 +139,8 @@ class float: def __sub__(self, x: int) -> float: pass class type: pass class str: pass +class list: pass +class dict: pass [out] OpExpr(3) : builtins.int OpExpr(4) : builtins.float @@ -165,6 +167,8 @@ class bool: pass class type: pass class function: pass class str: pass +class list: pass +class dict: pass [out] ComparisonExpr(3) : builtins.bool ComparisonExpr(4) : builtins.bool From cf2e40446af3e1725649e4a2d72b1e639045a6fd Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 30 Jan 2023 01:21:02 -0800 Subject: [PATCH 219/292] stubgen: fix crash with PEP 604 union in typevar bound (#14557) Fixes #14533 --- mypy/stubgen.py | 4 ++++ test-data/unit/stubgen.test | 9 +++++++++ 2 files changed, 13 insertions(+) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 8c7e24504270..bed552c3e214 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -95,6 +95,7 @@ MemberExpr, MypyFile, NameExpr, + OpExpr, OverloadedFuncDef, Statement, StrExpr, @@ -402,6 +403,9 @@ def visit_list_expr(self, node: ListExpr) -> str: def visit_ellipsis(self, node: 
EllipsisExpr) -> str: return "..." + def visit_op_expr(self, o: OpExpr) -> str: + return f"{o.left.accept(self)} {o.op} {o.right.accept(self)}" + class ImportTracker: """Record necessary imports during stub generation.""" diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 8467271e5593..009db553237f 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2734,3 +2734,12 @@ class Some: def __int__(self) -> int: ... def __float__(self) -> float: ... def __index__(self) -> int: ... + + +[case testTypeVarPEP604Bound] +from typing import TypeVar +T = TypeVar("T", bound=str | None) +[out] +from typing import TypeVar + +T = TypeVar('T', bound=str | None) From b2cf9d1021a812e395e6d276f64e25642e1054e7 Mon Sep 17 00:00:00 2001 From: jhance Date: Mon, 30 Jan 2023 06:27:33 -0800 Subject: [PATCH 220/292] [mypyc] Optimize __(a)enter__/__(a)exit__ paths for native case (#14530) Closes mypyc/mypyc#904 Directly calls enter and exit handlers in the case that the context manager is implemented natively. Unfortunately the implementation becomes a bit more complicated because there are two different places where we call exit in different ways, and they both need to support the native and non-native cases. --- mypyc/irbuild/statement.py | 54 +++++++++----- mypyc/test-data/irbuild-try.test | 105 ++++++++++++++++++++++++++++ mypyc/test-data/run-generators.test | 17 +++++ mypyc/test-data/run-misc.test | 30 ++++++++ 4 files changed, 190 insertions(+), 16 deletions(-) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 6e465893607d..b9754ba1a147 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -50,6 +50,7 @@ Integer, LoadAddress, LoadErrorValue, + MethodCall, RaiseStandardError, Register, Return, @@ -61,6 +62,7 @@ RInstance, exc_rtuple, is_tagged, + none_rprimitive, object_pointer_rprimitive, object_rprimitive, ) @@ -657,14 +659,45 @@ def transform_with( al = "a" if is_async else "" mgr_v = builder.accept(expr) - typ = builder.call_c(type_op, [mgr_v], line) - exit_ = builder.maybe_spill(builder.py_get_attr(typ, f"__{al}exit__", line)) - value = builder.py_call(builder.py_get_attr(typ, f"__{al}enter__", line), [mgr_v], line) + is_native = isinstance(mgr_v.type, RInstance) + if is_native: + value = builder.add(MethodCall(mgr_v, f"__{al}enter__", args=[], line=line)) + exit_ = None + else: + typ = builder.call_c(type_op, [mgr_v], line) + exit_ = builder.maybe_spill(builder.py_get_attr(typ, f"__{al}exit__", line)) + value = builder.py_call(builder.py_get_attr(typ, f"__{al}enter__", line), [mgr_v], line) + mgr = builder.maybe_spill(mgr_v) exc = builder.maybe_spill_assignable(builder.true()) if is_async: value = emit_await(builder, value, line) + def maybe_natively_call_exit(exc_info: bool) -> Value: + if exc_info: + args = get_sys_exc_info(builder) + else: + none = builder.none_object() + args = [none, none, none] + + if is_native: + assert isinstance(mgr_v.type, RInstance) + exit_val = builder.gen_method_call( + builder.read(mgr), + f"__{al}exit__", + arg_values=args, + line=line, + result_type=none_rprimitive, + ) + else: + assert exit_ is not None + exit_val = builder.py_call(builder.read(exit_), [builder.read(mgr)] + args, line) + + if is_async: + return emit_await(builder, exit_val, line) + else: + return exit_val + def try_body() -> None: if target: builder.assign(builder.get_assignment_target(target), value, line) @@ -673,13 +706,7 @@ def try_body() -> None: def except_body() -> None: builder.assign(exc, 
builder.false(), line) out_block, reraise_block = BasicBlock(), BasicBlock() - exit_val = builder.py_call( - builder.read(exit_), [builder.read(mgr)] + get_sys_exc_info(builder), line - ) - if is_async: - exit_val = emit_await(builder, exit_val, line) - - builder.add_bool_branch(exit_val, out_block, reraise_block) + builder.add_bool_branch(maybe_natively_call_exit(exc_info=True), out_block, reraise_block) builder.activate_block(reraise_block) builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) builder.add(Unreachable()) @@ -689,13 +716,8 @@ def finally_body() -> None: out_block, exit_block = BasicBlock(), BasicBlock() builder.add(Branch(builder.read(exc), exit_block, out_block, Branch.BOOL)) builder.activate_block(exit_block) - none = builder.none_object() - exit_val = builder.py_call( - builder.read(exit_), [builder.read(mgr), none, none, none], line - ) - if is_async: - emit_await(builder, exit_val, line) + maybe_natively_call_exit(exc_info=False) builder.goto_and_activate(out_block) transform_try_finally_stmt( diff --git a/mypyc/test-data/irbuild-try.test b/mypyc/test-data/irbuild-try.test index d1119c5deefd..faf3fa1dbd2f 100644 --- a/mypyc/test-data/irbuild-try.test +++ b/mypyc/test-data/irbuild-try.test @@ -416,3 +416,108 @@ L19: L20: return 1 +[case testWithNativeSimple] +class DummyContext: + def __enter__(self) -> None: + pass + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + pass + +def foo(x: DummyContext) -> None: + with x: + print('hello') +[out] +def DummyContext.__enter__(self): + self :: __main__.DummyContext +L0: + return 1 +def DummyContext.__exit__(self, exc_type, exc_val, exc_tb): + self :: __main__.DummyContext + exc_type, exc_val, exc_tb :: object +L0: + return 1 +def foo(x): + x :: __main__.DummyContext + r0 :: None + r1 :: bool + r2 :: str + r3 :: object + r4 :: str + r5, r6 :: object + r7, r8 :: tuple[object, object, object] + r9, r10, r11 :: object + r12 :: None + r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: bit + r18, r19, r20 :: tuple[object, object, object] + r21 :: object + r22 :: None + r23 :: bit +L0: + r0 = x.__enter__() + r1 = 1 +L1: +L2: + r2 = 'hello' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = PyObject_CallFunctionObjArgs(r5, r2, 0) + goto L8 +L3: (handler for L2) + r7 = CPy_CatchError() + r1 = 0 + r8 = CPy_GetExcInfo() + r9 = r8[0] + r10 = r8[1] + r11 = r8[2] + r12 = x.__exit__(r9, r10, r11) + r13 = box(None, r12) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L5 else goto L4 :: bool +L4: + CPy_Reraise() + unreachable +L5: +L6: + CPy_RestoreExcInfo(r7) + goto L8 +L7: (handler for L3, L4, L5) + CPy_RestoreExcInfo(r7) + r17 = CPy_KeepPropagating() + unreachable +L8: +L9: +L10: + r18 = :: tuple[object, object, object] + r19 = r18 + goto L12 +L11: (handler for L1, L6, L7, L8) + r20 = CPy_CatchError() + r19 = r20 +L12: + if r1 goto L13 else goto L14 :: bool +L13: + r21 = load_address _Py_NoneStruct + r22 = x.__exit__(r21, r21, r21) +L14: + if is_error(r19) goto L16 else goto L15 +L15: + CPy_Reraise() + unreachable +L16: + goto L20 +L17: (handler for L12, L13, L14, L15) + if is_error(r19) goto L19 else goto L18 +L18: + CPy_RestoreExcInfo(r19) +L19: + r23 = CPy_KeepPropagating() + unreachable +L20: + return 1 diff --git a/mypyc/test-data/run-generators.test b/mypyc/test-data/run-generators.test index 0f2cbe152fc0..bcf9da1846ae 100644 --- a/mypyc/test-data/run-generators.test +++ b/mypyc/test-data/run-generators.test 
@@ -662,3 +662,20 @@ def list_comp() -> List[int]: [file driver.py] from native import list_comp assert list_comp() == [5] + +[case testWithNative] +class DummyContext: + def __init__(self) -> None: + self.x = 0 + + def __enter__(self) -> None: + self.x += 1 + + def __exit__(self, exc_type, exc_value, exc_tb) -> None: + self.x -= 1 + +def test_basic() -> None: + context = DummyContext() + with context: + assert context.x == 1 + assert context.x == 0 diff --git a/mypyc/test-data/run-misc.test b/mypyc/test-data/run-misc.test index 001e0aa41b25..267a3441808f 100644 --- a/mypyc/test-data/run-misc.test +++ b/mypyc/test-data/run-misc.test @@ -1116,3 +1116,33 @@ i = b"foo" def test_redefinition() -> None: assert i == b"foo" + +[case testWithNative] +class DummyContext: + def __init__(self): + self.c = 0 + def __enter__(self) -> None: + self.c += 1 + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + self.c -= 1 + +def test_dummy_context() -> None: + c = DummyContext() + with c: + assert c.c == 1 + assert c.c == 0 + +[case testWithNativeVarArgs] +class DummyContext: + def __init__(self): + self.c = 0 + def __enter__(self) -> None: + self.c += 1 + def __exit__(self, *args: object) -> None: + self.c -= 1 + +def test_dummy_context() -> None: + c = DummyContext() + with c: + assert c.c == 1 + assert c.c == 0 From 1d247eaa0e4b61947ec807d8179e3356d1e7bba6 Mon Sep 17 00:00:00 2001 From: Max Murin Date: Mon, 30 Jan 2023 12:33:15 -0600 Subject: [PATCH 221/292] Fix bug with in operator used with a union of Container and Iterable (#14384) Fixes #4954. Modifies analysis of `in` comparison expressions. Previously, mypy would check the right operand of an `in` expression to see if it was a union of `Container`s, and then if it was a union of `Iterable`s, but would fail on unions of both `Container`s and `Iterable`s. --- mypy/checker.py | 20 +++++ mypy/checkexpr.py | 141 ++++++++++++++++++++----------- test-data/unit/check-unions.test | 17 ++++ 3 files changed, 127 insertions(+), 51 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 1f635c09bc0a..f8461fefc55f 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4500,6 +4500,26 @@ def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: # Non-tuple iterable. return iterator, echk.check_method_call_by_name("__next__", iterator, [], [], expr)[0] + def analyze_iterable_item_type_without_expression( + self, type: Type, context: Context + ) -> tuple[Type, Type]: + """Analyse iterable type and return iterator and iterator item types.""" + echk = self.expr_checker + iterable = get_proper_type(type) + iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], context)[0] + + if isinstance(iterable, TupleType): + joined: Type = UninhabitedType() + for item in iterable.items: + joined = join_types(joined, item) + return iterator, joined + else: + # Non-tuple iterable. + return ( + iterator, + echk.check_method_call_by_name("__next__", iterator, [], [], context)[0], + ) + def analyze_range_native_int_type(self, expr: Expression) -> Type | None: """Try to infer native int item type from arguments to range(...). 
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index d918eb9b5467..2a04aeddb634 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2919,68 +2919,108 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: That is, 'a < b > c == d' is check as 'a < b and b > c and c == d' """ result: Type | None = None - sub_result: Type | None = None + sub_result: Type # Check each consecutive operand pair and their operator for left, right, operator in zip(e.operands, e.operands[1:], e.operators): left_type = self.accept(left) - method_type: mypy.types.Type | None = None - if operator == "in" or operator == "not in": + # This case covers both iterables and containers, which have different meanings. + # For a container, the in operator calls the __contains__ method. + # For an iterable, the in operator iterates over the iterable, and compares each item one-by-one. + # We allow `in` for a union of containers and iterables as long as at least one of them matches the + # type of the left operand, as the operation will simply return False if the union's container/iterator + # type doesn't match the left operand. + # If the right operand has partial type, look it up without triggering # a "Need type annotation ..." message, as it would be noise. right_type = self.find_partial_type_ref_fast_path(right) if right_type is None: right_type = self.accept(right) # Validate the right operand - # Keep track of whether we get type check errors (these won't be reported, they - # are just to verify whether something is valid typing wise). - with self.msg.filter_errors(save_filtered_errors=True) as local_errors: - _, method_type = self.check_method_call_by_name( - method="__contains__", - base_type=right_type, - args=[left], - arg_kinds=[ARG_POS], - context=e, - ) + right_type = get_proper_type(right_type) + item_types: Sequence[Type] = [right_type] + if isinstance(right_type, UnionType): + item_types = list(right_type.items) sub_result = self.bool_type() - # Container item type for strict type overlap checks. Note: we need to only - # check for nominal type, because a usual "Unsupported operands for in" - # will be reported for types incompatible with __contains__(). - # See testCustomContainsCheckStrictEquality for an example. - cont_type = self.chk.analyze_container_item_type(right_type) - if isinstance(right_type, PartialType): - # We don't really know if this is an error or not, so just shut up. - pass - elif ( - local_errors.has_new_errors() - and - # is_valid_var_arg is True for any Iterable - self.is_valid_var_arg(right_type) - ): - _, itertype = self.chk.analyze_iterable_item_type(right) - method_type = CallableType( - [left_type], - [nodes.ARG_POS], - [None], - self.bool_type(), - self.named_type("builtins.function"), - ) - if not is_subtype(left_type, itertype): - self.msg.unsupported_operand_types("in", left_type, right_type, e) - # Only show dangerous overlap if there are no other errors. 
- elif ( - not local_errors.has_new_errors() - and cont_type - and self.dangerous_comparison( - left_type, cont_type, original_container=right_type, prefer_literal=False - ) - ): - self.msg.dangerous_comparison(left_type, cont_type, "container", e) - else: - self.msg.add_errors(local_errors.filtered_errors()) + + container_types: list[Type] = [] + iterable_types: list[Type] = [] + failed_out = False + encountered_partial_type = False + + for item_type in item_types: + # Keep track of whether we get type check errors (these won't be reported, they + # are just to verify whether something is valid typing wise). + with self.msg.filter_errors(save_filtered_errors=True) as container_errors: + _, method_type = self.check_method_call_by_name( + method="__contains__", + base_type=item_type, + args=[left], + arg_kinds=[ARG_POS], + context=e, + original_type=right_type, + ) + # Container item type for strict type overlap checks. Note: we need to only + # check for nominal type, because a usual "Unsupported operands for in" + # will be reported for types incompatible with __contains__(). + # See testCustomContainsCheckStrictEquality for an example. + cont_type = self.chk.analyze_container_item_type(item_type) + + if isinstance(item_type, PartialType): + # We don't really know if this is an error or not, so just shut up. + encountered_partial_type = True + pass + elif ( + container_errors.has_new_errors() + and + # is_valid_var_arg is True for any Iterable + self.is_valid_var_arg(item_type) + ): + # it's not a container, but it is an iterable + with self.msg.filter_errors(save_filtered_errors=True) as iterable_errors: + _, itertype = self.chk.analyze_iterable_item_type_without_expression( + item_type, e + ) + if iterable_errors.has_new_errors(): + self.msg.add_errors(iterable_errors.filtered_errors()) + failed_out = True + else: + method_type = CallableType( + [left_type], + [nodes.ARG_POS], + [None], + self.bool_type(), + self.named_type("builtins.function"), + ) + e.method_types.append(method_type) + iterable_types.append(itertype) + elif not container_errors.has_new_errors() and cont_type: + container_types.append(cont_type) + e.method_types.append(method_type) + else: + self.msg.add_errors(container_errors.filtered_errors()) + failed_out = True + + if not encountered_partial_type and not failed_out: + iterable_type = UnionType.make_union(iterable_types) + if not is_subtype(left_type, iterable_type): + if len(container_types) == 0: + self.msg.unsupported_operand_types("in", left_type, right_type, e) + else: + container_type = UnionType.make_union(container_types) + if self.dangerous_comparison( + left_type, + container_type, + original_container=right_type, + prefer_literal=False, + ): + self.msg.dangerous_comparison( + left_type, container_type, "container", e + ) + elif operator in operators.op_methods: method = operators.op_methods[operator] @@ -2988,6 +3028,7 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: sub_result, method_type = self.check_op( method, left_type, right, e, allow_reverse=True ) + e.method_types.append(method_type) # Only show dangerous overlap if there are no other errors. See # testCustomEqCheckStrictEquality for an example. 
@@ -3007,12 +3048,10 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: left_type = try_getting_literal(left_type) right_type = try_getting_literal(right_type) self.msg.dangerous_comparison(left_type, right_type, "identity", e) - method_type = None + e.method_types.append(None) else: raise RuntimeError(f"Unknown comparison operator {operator}") - e.method_types.append(method_type) - # Determine type of boolean-and of result and sub_result if result is None: result = sub_result diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index cabc28e786b2..65d5c1abc7e8 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -1202,3 +1202,20 @@ def foo( yield i foo([1]) [builtins fixtures/list.pyi] + +[case testUnionIterableContainer] +from typing import Iterable, Container, Union + +i: Iterable[str] +c: Container[str] +u: Union[Iterable[str], Container[str]] +ni: Union[Iterable[str], int] +nc: Union[Container[str], int] + +'x' in i +'x' in c +'x' in u +'x' in ni # E: Unsupported right operand type for in ("Union[Iterable[str], int]") +'x' in nc # E: Unsupported right operand type for in ("Union[Container[str], int]") +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] From 7c14ebae6ca5d6ec39600e122b58a62afaa3ab02 Mon Sep 17 00:00:00 2001 From: Chad Dombrova Date: Mon, 30 Jan 2023 13:39:31 -0800 Subject: [PATCH 222/292] stubgen: Allow aliases below the top level (#14388) --- mypy/stubgen.py | 5 ++-- test-data/unit/stubgen.test | 60 ++++++++++++++++++++++++++++++------- 2 files changed, 52 insertions(+), 13 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index bed552c3e214..51ee1b93de14 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -999,8 +999,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: self.process_namedtuple(lvalue, o.rvalue) continue if ( - self.is_top_level() - and isinstance(lvalue, NameExpr) + isinstance(lvalue, NameExpr) and not self.is_private_name(lvalue.name) and # it is never an alias with explicit annotation @@ -1118,7 +1117,7 @@ def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: def process_typealias(self, lvalue: NameExpr, rvalue: Expression) -> None: p = AliasPrinter(self) - self.add(f"{lvalue.name} = {rvalue.accept(p)}\n") + self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n") self.record_name(lvalue.name) self._vars[-1].append(lvalue.name) diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 009db553237f..4909c0005412 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -947,16 +947,6 @@ from typing import Any alias = Container[Any] -[case testAliasOnlyToplevel] -class Foo: - alias = str - -[out] -from _typeshed import Incomplete - -class Foo: - alias: Incomplete - [case testAliasExceptions] noalias1 = None noalias2 = ... @@ -969,6 +959,56 @@ noalias1: Incomplete noalias2: Incomplete noalias3: bool +[case testComplexAlias] +# modules: main a + +from a import valid + +def func() -> int: + return 2 + +aliased_func = func +int_value = 1 + +class A: + cls_var = valid + + def __init__(self, arg: str) -> None: + self.self_var = arg + + def meth(self) -> None: + func_value = int_value + + alias_meth = meth + alias_func = func + alias_alias_func = aliased_func + int_value = int_value + +[file a.py] +valid : list[int] = [1, 2, 3] + + +[out] +# main.pyi +from _typeshed import Incomplete +from a import valid + +def func() -> int: ... 
+aliased_func = func +int_value: int + +class A: + cls_var = valid + self_var: Incomplete + def __init__(self, arg: str) -> None: ... + def meth(self) -> None: ... + alias_meth = meth + alias_func = func + alias_alias_func = aliased_func + int_value = int_value +# a.pyi +valid: list[int] + -- More features/fixes: -- do not export deleted names From 28c67cbeb09a14865ccc17cc1d13debcdf744940 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Tue, 31 Jan 2023 16:11:47 +0900 Subject: [PATCH 223/292] More helpful type guards (#14238) Fixes #13199 Refs #14425 --- mypy/checker.py | 22 +++++++- mypy/semanal.py | 14 +++++ test-data/unit/check-python38.test | 28 +++++++++ test-data/unit/check-typeguard.test | 88 ++++++++++++++++++++++++++--- 4 files changed, 142 insertions(+), 10 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index f8461fefc55f..c9d2d3ede283 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -5350,10 +5350,26 @@ def find_isinstance_check_helper(self, node: Expression) -> tuple[TypeMap, TypeM return self.hasattr_type_maps(expr, self.lookup_type(expr), attr[0]) elif isinstance(node.callee, RefExpr): if node.callee.type_guard is not None: - # TODO: Follow keyword args or *args, **kwargs + # TODO: Follow *args, **kwargs if node.arg_kinds[0] != nodes.ARG_POS: - self.fail(message_registry.TYPE_GUARD_POS_ARG_REQUIRED, node) - return {}, {} + # the first argument might be used as a kwarg + called_type = get_proper_type(self.lookup_type(node.callee)) + assert isinstance(called_type, (CallableType, Overloaded)) + + # *assuming* the overloaded function is correct, there's a couple cases: + # 1) The first argument has different names, but is pos-only. We don't + # care about this case, the argument must be passed positionally. + # 2) The first argument allows keyword reference, therefore must be the + # same between overloads. + name = called_type.items[0].arg_names[0] + + if name in node.arg_names: + idx = node.arg_names.index(name) + # we want the idx-th variable to be narrowed + expr = collapse_walrus(node.args[idx]) + else: + self.fail(message_registry.TYPE_GUARD_POS_ARG_REQUIRED, node) + return {}, {} if literal(expr) == LITERAL_TYPE: # Note: we wrap the target type, so that we can special case later. # Namely, for isinstance() we use a normal meet, while TypeGuard is diff --git a/mypy/semanal.py b/mypy/semanal.py index 79302b4d08e1..d0802c194943 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -864,6 +864,20 @@ def analyze_func_def(self, defn: FuncDef) -> None: return assert isinstance(result, ProperType) if isinstance(result, CallableType): + # type guards need to have a positional argument, to spec + if ( + result.type_guard + and ARG_POS not in result.arg_kinds[self.is_class_scope() :] + and not defn.is_static + ): + self.fail( + "TypeGuard functions must have a positional argument", + result, + code=codes.VALID_TYPE, + ) + # in this case, we just kind of just ... remove the type guard. 
+ result = result.copy_modified(type_guard=None) + result = self.remove_unpack_kwargs(defn, result) if has_self_type and self.type is not None: info = self.type diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index 7e5e0f3cf185..b9c798b9530e 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -735,6 +735,34 @@ class C(Generic[T]): main:10: note: Revealed type is "builtins.int" main:10: note: Revealed type is "builtins.str" +[case testTypeGuardWithPositionalOnlyArg] +# flags: --python-version 3.8 +from typing_extensions import TypeGuard + +def typeguard(x: object, /) -> TypeGuard[int]: + ... + +n: object +if typeguard(n): + reveal_type(n) +[builtins fixtures/tuple.pyi] +[out] +main:9: note: Revealed type is "builtins.int" + +[case testTypeGuardKeywordFollowingWalrus] +# flags: --python-version 3.8 +from typing import cast +from typing_extensions import TypeGuard + +def typeguard(x: object) -> TypeGuard[int]: + ... + +if typeguard(x=(n := cast(object, "hi"))): + reveal_type(n) +[builtins fixtures/tuple.pyi] +[out] +main:9: note: Revealed type is "builtins.int" + [case testNoCrashOnAssignmentExprClass] class C: [(j := i) for i in [1, 2, 3]] # E: Assignment expression within a comprehension cannot be used in a class body diff --git a/test-data/unit/check-typeguard.test b/test-data/unit/check-typeguard.test index cf72e7033087..39bcb091f09e 100644 --- a/test-data/unit/check-typeguard.test +++ b/test-data/unit/check-typeguard.test @@ -37,8 +37,8 @@ reveal_type(foo) # N: Revealed type is "def (a: builtins.object) -> TypeGuard[b [case testTypeGuardCallArgsNone] from typing_extensions import TypeGuard class Point: pass -# TODO: error on the 'def' line (insufficient args for type guard) -def is_point() -> TypeGuard[Point]: pass + +def is_point() -> TypeGuard[Point]: pass # E: TypeGuard functions must have a positional argument def main(a: object) -> None: if is_point(): reveal_type(a) # N: Revealed type is "builtins.object" @@ -227,13 +227,13 @@ def main(a: object) -> None: from typing_extensions import TypeGuard def is_float(a: object, b: object = 0) -> TypeGuard[float]: pass def main1(a: object) -> None: - # This is debatable -- should we support these cases? + if is_float(a=a, b=1): + reveal_type(a) # N: Revealed type is "builtins.float" - if is_float(a=a, b=1): # E: Type guard requires positional argument - reveal_type(a) # N: Revealed type is "builtins.object" + if is_float(b=1, a=a): + reveal_type(a) # N: Revealed type is "builtins.float" - if is_float(b=1, a=a): # E: Type guard requires positional argument - reveal_type(a) # N: Revealed type is "builtins.object" + # This is debatable -- should we support these cases? ta = (a,) if is_float(*ta): # E: Type guard requires positional argument @@ -597,3 +597,77 @@ def func(names: Tuple[str, ...]): if is_two_element_tuple(names): reveal_type(names) # N: Revealed type is "Tuple[builtins.str, builtins.str]" [builtins fixtures/tuple.pyi] + +[case testTypeGuardErroneousDefinitionFails] +from typing_extensions import TypeGuard + +class Z: + def typeguard(self, *, x: object) -> TypeGuard[int]: # E: TypeGuard functions must have a positional argument + ... + +def bad_typeguard(*, x: object) -> TypeGuard[int]: # E: TypeGuard functions must have a positional argument + ... +[builtins fixtures/tuple.pyi] + +[case testTypeGuardWithKeywordArg] +from typing_extensions import TypeGuard + +class Z: + def typeguard(self, x: object) -> TypeGuard[int]: + ... 
+ +def typeguard(x: object) -> TypeGuard[int]: + ... + +n: object +if typeguard(x=n): + reveal_type(n) # N: Revealed type is "builtins.int" + +if Z().typeguard(x=n): + reveal_type(n) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case testStaticMethodTypeGuard] +from typing_extensions import TypeGuard + +class Y: + @staticmethod + def typeguard(h: object) -> TypeGuard[int]: + ... + +x: object +if Y().typeguard(x): + reveal_type(x) # N: Revealed type is "builtins.int" +if Y.typeguard(x): + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] +[builtins fixtures/classmethod.pyi] + +[case testTypeGuardKwargFollowingThroughOverloaded] +from typing import overload, Union +from typing_extensions import TypeGuard + +@overload +def typeguard(x: object, y: str) -> TypeGuard[str]: + ... + +@overload +def typeguard(x: object, y: int) -> TypeGuard[int]: + ... + +def typeguard(x: object, y: Union[int, str]) -> Union[TypeGuard[int], TypeGuard[str]]: + ... + +x: object +if typeguard(x=x, y=42): + reveal_type(x) # N: Revealed type is "builtins.int" + +if typeguard(y=42, x=x): + reveal_type(x) # N: Revealed type is "builtins.int" + +if typeguard(x=x, y="42"): + reveal_type(x) # N: Revealed type is "builtins.str" + +if typeguard(y="42", x=x): + reveal_type(x) # N: Revealed type is "builtins.str" +[builtins fixtures/tuple.pyi] From 1a781e7b46fa362718259187ca1f7cdfd9fad136 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 31 Jan 2023 01:05:22 -0800 Subject: [PATCH 224/292] Remove dead branch when analysing type aliases (#14566) Clean up from https://github.com/python/mypy/pull/14159 --- mypy/semanal.py | 11 +++-------- mypy/typeanal.py | 4 ++-- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index d0802c194943..cd5d5a0d808d 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3373,7 +3373,7 @@ def analyze_alias( tvar_def = self.tvar_scope.bind_new(name, tvar_expr) tvar_defs.append(tvar_def) - res = analyze_type_alias( + analyzed, depends_on = analyze_type_alias( typ, self, self.tvar_scope, @@ -3385,13 +3385,8 @@ def analyze_alias( global_scope=global_scope, allowed_alias_tvars=tvar_defs, ) - analyzed: Type | None = None - if res: - analyzed, depends_on = res - qualified_tvars = [node.fullname for (name, node) in found_type_vars] - else: - depends_on = set() - qualified_tvars = [] + + qualified_tvars = [node.fullname for _name, node in found_type_vars] return analyzed, tvar_defs, depends_on, qualified_tvars def is_pep_613(self, s: AssignmentStmt) -> bool: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 07720afeff88..2cd136e53842 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -132,12 +132,12 @@ def analyze_type_alias( in_dynamic_func: bool = False, global_scope: bool = True, allowed_alias_tvars: list[TypeVarLikeType] | None = None, -) -> tuple[Type, set[str]] | None: +) -> tuple[Type, set[str]]: """Analyze r.h.s. of a (potential) type alias definition. If `node` is valid as a type alias rvalue, return the resulting type and a set of full names of type aliases it depends on (directly or indirectly). - Return None otherwise. 'node' must have been semantically analyzed. + 'node' must have been semantically analyzed. 
""" analyzer = TypeAnalyser( api, From 90168b8396bf53f7573e8f0ca198ab33a855e3d2 Mon Sep 17 00:00:00 2001 From: Ilya Konstantinov Date: Tue, 31 Jan 2023 14:15:45 -0500 Subject: [PATCH 225/292] In error messages, quote just the module's name (#14567) This makes it consistent with other error messages, e.g. https://github.com/python/mypy/blob/1a781e7b46fa362718259187ca1f7cdfd9fad136/mypy/semanal.py#L2572-L2574 --- mypy/messages.py | 3 ++- test-data/unit/check-incremental.test | 2 +- test-data/unit/check-modules.test | 4 ++-- test-data/unit/check-protocols.test | 8 ++++---- test-data/unit/fine-grained.test | 2 +- 5 files changed, 10 insertions(+), 9 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index b529615e564e..a5fd09493456 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2209,6 +2209,7 @@ def quote_type_string(type_string: str) -> str: no_quote_regex = r"^<(tuple|union): \d+ items>$" if ( type_string in ["Module", "overloaded function", "", ""] + or type_string.startswith("Module ") or re.match(no_quote_regex, type_string) is not None or type_string.endswith("?") ): @@ -2285,7 +2286,7 @@ def format_literal_value(typ: LiteralType) -> str: # Make some common error messages simpler and tidier. base_str = "Module" if itype.extra_attrs and itype.extra_attrs.mod_name and module_names: - return f"{base_str} {itype.extra_attrs.mod_name}" + return f'{base_str} "{itype.extra_attrs.mod_name}"' return base_str if itype.type.fullname == "typing._SpecialForm": # This is not a real type but used for some typing-related constructs. diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 1aff1ba2862f..fed16bc683e2 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6054,7 +6054,7 @@ def update() -> str: ... 
[out] [out2] tmp/m.py:9: error: Argument 1 to "setup" has incompatible type Module; expected "Options" -tmp/m.py:9: note: Following member(s) of "Module default_config" have conflicts: +tmp/m.py:9: note: Following member(s) of Module "default_config" have conflicts: tmp/m.py:9: note: Expected: tmp/m.py:9: note: def update() -> bool tmp/m.py:9: note: Got: diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index b11a959df4cc..4b8308310ae6 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1852,7 +1852,7 @@ class C: import stub reveal_type(stub.y) # N: Revealed type is "builtins.int" -reveal_type(stub.z) # E: "Module stub" does not explicitly export attribute "z" \ +reveal_type(stub.z) # E: Module "stub" does not explicitly export attribute "z" \ # N: Revealed type is "Any" [file stub.pyi] @@ -1944,7 +1944,7 @@ import mod from mod import C, D # E: Module "mod" does not explicitly export attribute "C" reveal_type(mod.x) # N: Revealed type is "mod.submod.C" -mod.C # E: "Module mod" does not explicitly export attribute "C" +mod.C # E: Module "mod" does not explicitly export attribute "C" y = mod.D() reveal_type(y.a) # N: Revealed type is "builtins.str" diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 96b3a484f56a..c787b34bf26b 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -3720,10 +3720,10 @@ setup(bad_config_1) # E: Argument 1 to "setup" has incompatible type Module; ex # N: "ModuleType" is missing following "Options" protocol member: \ # N: timeout setup(bad_config_2) # E: Argument 1 to "setup" has incompatible type Module; expected "Options" \ - # N: Following member(s) of "Module bad_config_2" have conflicts: \ + # N: Following member(s) of Module "bad_config_2" have conflicts: \ # N: one_flag: expected "bool", got "int" setup(bad_config_3) # E: Argument 1 to "setup" has incompatible type Module; expected "Options" \ - # N: Following member(s) of "Module bad_config_3" have conflicts: \ + # N: Following member(s) of Module "bad_config_3" have conflicts: \ # N: Expected: \ # N: def update() -> bool \ # N: Got: \ @@ -3789,7 +3789,7 @@ class Result(Protocol): def run(x: Runner) -> None: ... run(runner) # OK run(bad_runner) # E: Argument 1 to "run" has incompatible type Module; expected "Runner" \ - # N: Following member(s) of "Module bad_runner" have conflicts: \ + # N: Following member(s) of Module "bad_runner" have conflicts: \ # N: Expected: \ # N: def (int, /) -> Result \ # N: Got: \ @@ -3821,7 +3821,7 @@ class Result(Protocol): def run(x: Runner) -> None: ... run(runner) # OK run(bad_runner) # E: Argument 1 to "run" has incompatible type Module; expected "Runner" \ - # N: Following member(s) of "Module bad_runner" have conflicts: \ + # N: Following member(s) of Module "bad_runner" have conflicts: \ # N: Expected: \ # N: def (int, /) -> Result \ # N: Got: \ diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 9f22dc9ab7ac..58339828677d 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -9975,7 +9975,7 @@ def update() -> str: ... 
[out] == m.py:9: error: Argument 1 to "setup" has incompatible type Module; expected "Options" -m.py:9: note: Following member(s) of "Module default_config" have conflicts: +m.py:9: note: Following member(s) of Module "default_config" have conflicts: m.py:9: note: Expected: m.py:9: note: def update() -> bool m.py:9: note: Got: From c3750099f4bffeb1b962a6b59a3851d297f9ba41 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 1 Feb 2023 09:56:13 +0000 Subject: [PATCH 226/292] Fix crash on prefixed paramspec with deferral (#14569) Fixes #14565 The fix looks simple, looks like an obvious omission. --- mypy/types.py | 4 ++-- test-data/unit/check-parameter-specification.test | 15 +++++++++++++++ 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index 0244f57847c5..90d33839c693 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -716,13 +716,13 @@ def name_with_suffix(self) -> str: return n def __hash__(self) -> int: - return hash((self.id, self.flavor)) + return hash((self.id, self.flavor, self.prefix)) def __eq__(self, other: object) -> bool: if not isinstance(other, ParamSpecType): return NotImplemented # Upper bound can be ignored, since it's determined by flavor. - return self.id == other.id and self.flavor == other.flavor + return self.id == other.id and self.flavor == other.flavor and self.prefix == other.prefix def serialize(self) -> JsonDict: assert not self.id.is_meta_var() diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 463ba3e65466..56fc3b6faa14 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1456,3 +1456,18 @@ class C(Generic[T]): ... C[Callable[P, int]]() # E: The first argument to Callable must be a list of types, parameter specification, or "..." \ # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas [builtins fixtures/paramspec.pyi] + +[case testConcatDeferralNoCrash] +from typing import Callable, TypeVar +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") +T = TypeVar("T", bound="Defer") + +Alias = Callable[P, bool] +Concat = Alias[Concatenate[T, P]] + +def test(f: Concat[T, ...]) -> None: ... + +class Defer: ... +[builtins fixtures/paramspec.pyi] From 44a653c82dabae99fbd1d533021c1bbc6830808f Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Wed, 1 Feb 2023 12:02:43 -0600 Subject: [PATCH 227/292] [dataclass_transform] support class decorator parameters (#14561) The initial implementation of `typing.dataclass_transform` only supported the no-argument `@decorator` form; this adds support for the `@decorator(...)` form supporting the same arguments we support for `dataclasses.dataclass`. This also matches the list of arguments specified in PEP 681. Co-authored-by: Wesley Collin Wright --- mypy/plugins/common.py | 40 ++++++++++++------ mypy/semanal.py | 11 +++++ test-data/unit/check-dataclass-transform.test | 41 +++++++++++++++++++ 3 files changed, 79 insertions(+), 13 deletions(-) diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index a2a38f256da3..38109892e09d 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -13,6 +13,7 @@ Expression, FuncDef, JsonDict, + Node, PassStmt, RefExpr, SymbolTableNode, @@ -68,19 +69,7 @@ def _get_argument(call: CallExpr, name: str) -> Expression | None: # # Note: I'm not hard-coding the index so that in the future we can support other # attrib and class makers. 
- if not isinstance(call.callee, RefExpr): - return None - - callee_type = None - callee_node = call.callee.node - if isinstance(callee_node, (Var, SYMBOL_FUNCBASE_TYPES)) and callee_node.type: - callee_node_type = get_proper_type(callee_node.type) - if isinstance(callee_node_type, Overloaded): - # We take the last overload. - callee_type = callee_node_type.items[-1] - elif isinstance(callee_node_type, CallableType): - callee_type = callee_node_type - + callee_type = _get_callee_type(call) if not callee_type: return None @@ -94,6 +83,31 @@ def _get_argument(call: CallExpr, name: str) -> Expression | None: return attr_value if attr_name == argument.name: return attr_value + + return None + + +def _get_callee_type(call: CallExpr) -> CallableType | None: + """Return the type of the callee, regardless of its syntatic form.""" + + callee_node: Node | None = call.callee + + if isinstance(callee_node, RefExpr): + callee_node = callee_node.node + + # Some decorators may be using typing.dataclass_transform, which is itself a decorator, so we + # need to unwrap them to get at the true callee + if isinstance(callee_node, Decorator): + callee_node = callee_node.func + + if isinstance(callee_node, (Var, SYMBOL_FUNCBASE_TYPES)) and callee_node.type: + callee_node_type = get_proper_type(callee_node.type) + if isinstance(callee_node_type, Overloaded): + # We take the last overload. + return callee_node_type.items[-1] + elif isinstance(callee_node_type, CallableType): + return callee_node_type + return None diff --git a/mypy/semanal.py b/mypy/semanal.py index cd5d5a0d808d..6a483edd7c72 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -6646,5 +6646,16 @@ def halt(self, reason: str = ...) -> NoReturn: def is_dataclass_transform_decorator(node: Node | None) -> bool: if isinstance(node, RefExpr): return is_dataclass_transform_decorator(node.node) + if isinstance(node, CallExpr): + # Like dataclasses.dataclass, transform-based decorators can be applied either with or + # without parameters; ie, both of these forms are accepted: + # + # @typing.dataclass_transform + # class Foo: ... + # @typing.dataclass_transform(eq=True, order=True, ...) + # class Bar: ... + # + # We need to unwrap the call for the second variant. 
+ return is_dataclass_transform_decorator(node.callee) return isinstance(node, Decorator) and node.func.is_dataclass_transform diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 4f907e3186b6..1a25c087c5a6 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -44,3 +44,44 @@ Person('Jonh', 21, None) # E: Too many arguments for "Person" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformParametersAreApplied] +# flags: --python-version 3.7 +from typing import dataclass_transform, Callable, Type + +@dataclass_transform() +def my_dataclass(*, eq: bool, order: bool) -> Callable[[Type], Type]: + def transform(cls: Type) -> Type: + return cls + return transform + +@my_dataclass(eq=False, order=True) +class Person: # E: eq must be True if order is True + name: str + age: int + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('John', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-medium.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformParametersMustBeBoolLiterals] +# flags: --python-version 3.7 +from typing import dataclass_transform, Callable, Type + +@dataclass_transform() +def my_dataclass(*, eq: bool = True, order: bool = False) -> Callable[[Type], Type]: + def transform(cls: Type) -> Type: + return cls + return transform + +BOOL_CONSTANT = True +@my_dataclass(eq=BOOL_CONSTANT) # E: "eq" argument must be True or False. +class A: ... +@my_dataclass(order=not False) # E: "order" argument must be True or False. +class B: ... + +[typing fixtures/typing-medium.pyi] +[builtins fixtures/dataclasses.pyi] From 6b9de5c933bc5f0371d8ff2c0c9e6dc1e51285d2 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 1 Feb 2023 11:02:34 -0800 Subject: [PATCH 228/292] More improvements to getting started docs (#14572) For the most part, this shortens the Getting Started page, which was getting a little too long to read comfortably and had caveats that aren't super important. The cheat sheet does a really great job of "show, don't tell", so recommend that even more aggressively for beginners. The BankAccount example was nice, and the cheat sheet was missing a discussion on inheritance, so move a version of that over there. Finally, most users of mypy don't need to know the details of typeshed and stub files, especially not when getting started. So reframe as a more generic section about types for third party libraries. Linking #13681 --- docs/source/cheat_sheet_py3.rst | 67 ++++++--- docs/source/getting_started.rst | 237 ++++++++------------------------ docs/source/index.rst | 2 +- docs/source/running_mypy.rst | 5 +- docs/source/stubs.rst | 7 +- 5 files changed, 111 insertions(+), 207 deletions(-) diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 7179318e31b8..5aa1770512b8 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -34,7 +34,9 @@ Useful built-in types .. code-block:: python - # For most types, just use the name of the type + # For most types, just use the name of the type. 
+ # Note that mypy can usually infer the type of a variable from its value, + # so technically these annotations are redundant x: int = 1 x: float = 1.0 x: bool = True @@ -100,12 +102,18 @@ Functions def show(value: str, excitement: int = 10) -> None: print(value + "!" * excitement) + # Note that arguments without a type are dynamically typed (treated as Any) + # and that functions without any annotations not checked + def untyped(x): + x.anything() + 1 + "string" # no errors + # This is how you annotate a callable (function) value x: Callable[[int, float], float] = f + def register(callback: Callable[[str], int]) -> None: ... # A generator function that yields ints is secretly just a function that # returns an iterator of ints, so that's how we annotate it - def g(n: int) -> Iterator[int]: + def gen(n: int) -> Iterator[int]: i = 0 while i < n: yield i @@ -143,28 +151,49 @@ Classes .. code-block:: python - class MyClass: - # You can optionally declare instance variables in the class body - attr: int - # This is an instance variable with a default value - charge_percent: int = 100 - + class BankAccount: # The "__init__" method doesn't return anything, so it gets return # type "None" just like any other method that doesn't return anything - def __init__(self) -> None: - ... + def __init__(self, account_name: str, initial_balance: int = 0) -> None: + # mypy will infer the correct types for these instance variables + # based on the types of the parameters. + self.account_name = account_name + self.balance = initial_balance # For instance methods, omit type for "self" - def my_method(self, num: int, str1: str) -> str: - return num * str1 + def deposit(self, amount: int) -> None: + self.balance += amount + + def withdraw(self, amount: int) -> None: + self.balance -= amount # User-defined classes are valid as types in annotations - x: MyClass = MyClass() + account: BankAccount = BankAccount("Alice", 400) + def transfer(src: BankAccount, dst: BankAccount, amount: int) -> None: + src.withdraw(amount) + dst.deposit(amount) + + # Functions that accept BankAccount also accept any subclass of BankAccount! + class AuditedBankAccount(BankAccount): + # You can optionally declare instance variables in the class body + audit_log: list[str] + # This is an instance variable with a default value + auditor_name: str = "The Spanish Inquisition" + + def __init__(self, account_name: str, initial_balance: int = 0) -> None: + super().__init__(account_name, initial_balance) + self.audit_log: list[str] = [] + + def deposit(self, amount: int) -> None: + self.audit_log.append(f"Deposited {amount}") + self.balance += amount + + def withdraw(self, amount: int) -> None: + self.audit_log.append(f"Withdrew {amount}") + self.balance -= amount - # You can also declare the type of an attribute in "__init__" - class Box: - def __init__(self) -> None: - self.items: list[str] = [] + audited = AuditedBankAccount("Bob", 300) + transfer(audited, account, 100) # type checks! 
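
A minimal further sketch, assuming the ``account`` and ``audited`` objects defined
above: mypy accepts a subclass instance wherever the base class is expected, but
rejects attribute access that only the subclass declares:

.. code-block:: python

    transfer(account, audited, 25)  # also type checks: both are BankAccounts
    audited.audit_log               # OK: declared on AuditedBankAccount
    account.audit_log               # error: "BankAccount" has no attribute "audit_log"
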
# You can use the ClassVar annotation to declare a class variable class Car: @@ -172,9 +201,7 @@ Classes passengers: ClassVar[list[str]] # If you want dynamic attributes on your class, have it - # override "__setattr__" or "__getattr__": - # - "__getattr__" allows for dynamic access to names - # - "__setattr__" allows for dynamic assignment to names + # override "__setattr__" or "__getattr__" class A: # This will allow assignment to any A.x, if x is the same type as "value" # (use "value: Any" to allow arbitrary types) diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index db7c18d5e242..bbe2d25d3b03 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -132,8 +132,8 @@ may be useful. See :ref:`getting-to-strict` for how to build up to ``--strict``. See :ref:`command-line` and :ref:`config-file` for a complete reference on configuration options. -Additional types, and the typing module -*************************************** +More complex types +****************** So far, we've added type hints that use only basic concrete types like ``str`` and ``float``. What if we want to express more complex types, @@ -159,28 +159,11 @@ accept one or more *type parameters*. In this case, we *parameterized* :py:class by writing ``list[str]``. This lets mypy know that ``greet_all`` accepts specifically lists containing strings, and not lists containing ints or any other type. -In Python 3.8 and earlier, you can instead import the -:py:class:`~typing.List` type from the :py:mod:`typing` module: - -.. code-block:: python - - from typing import List # Python 3.8 and earlier - - def greet_all(names: List[str]) -> None: - for name in names: - print('Hello ' + name) - - ... - -You can find many of these more complex static types in the :py:mod:`typing` module. - In the above examples, the type signature is perhaps a little too rigid. After all, there's no reason why this function must accept *specifically* a list -- it would run just fine if you were to pass in a tuple, a set, or any other custom iterable. -You can express this idea using the -:py:class:`collections.abc.Iterable` (or :py:class:`typing.Iterable` in Python -3.8 and earlier) type instead of :py:class:`list` : +You can express this idea using :py:class:`collections.abc.Iterable`: .. code-block:: python @@ -190,8 +173,19 @@ You can express this idea using the for name in names: print('Hello ' + name) +This behavior is actually a fundamental aspect of the PEP 484 type system: when +we annotate some variable with a type ``T``, we are actually telling mypy that +variable can be assigned an instance of ``T``, or an instance of a *subtype* of ``T``. +That is, ``list[str]`` is a subtype of ``Iterable[str]``. + +This also applies to inheritance, so if you have a class ``Child`` that inherits from +``Parent``, then a value of type ``Child`` can be assigned to a variable of type ``Parent``. +For example, a ``RuntimeError`` instance can be passed to a function that is annotated +as taking an ``Exception``. + As another example, suppose you want to write a function that can accept *either* -ints or strings, but no other types. You can express this using the :py:data:`~typing.Union` type: +ints or strings, but no other types. You can express this using the +:py:data:`~typing.Union` type. For example, ``int`` is a subtype of ``Union[int, str]``: .. code-block:: python @@ -203,26 +197,12 @@ ints or strings, but no other types. 
You can express this using the :py:data:`~t else: return user_id -Similarly, suppose that you want the function to accept only strings or ``None``. You can -again use :py:data:`~typing.Union` and use ``Union[str, None]`` -- or alternatively, use the type -``Optional[str]``. These two types are identical and interchangeable: ``Optional[str]`` -is just a shorthand or *alias* for ``Union[str, None]``. It exists mostly as a convenience -to help function signatures look a little cleaner: +The :py:mod:`typing` module contains many other useful types. -.. code-block:: python +For a quick overview, look through the :ref:`mypy cheatsheet `. - from typing import Optional - - def greeting(name: Optional[str] = None) -> str: - # Optional[str] means the same thing as Union[str, None] - if name is None: - name = 'stranger' - return 'Hello, ' + name - -The :py:mod:`typing` module contains many other useful types. You can find a -quick overview by looking through the :ref:`mypy cheatsheet ` -and a more detailed overview (including information on how to make your own -generic types or your own type aliases) by looking through the +For a detailed overview (including information on how to make your own +generic types or your own type aliases), look through the :ref:`type system reference `. .. note:: @@ -250,10 +230,7 @@ mypy will try and *infer* as many details as possible. We saw an example of this in the ``normalize_id`` function above -- mypy understands basic :py:func:`isinstance ` checks and so can infer that the ``user_id`` variable was of -type ``int`` in the if-branch and of type ``str`` in the else-branch. Similarly, mypy -was able to understand that ``name`` could not possibly be ``None`` in the ``greeting`` -function above, based both on the ``name is None`` check and the variable assignment -in that if statement. +type ``int`` in the if-branch and of type ``str`` in the else-branch. As another example, consider the following function. Mypy can type check this function without a problem: it will use the available context and deduce that ``output`` must be @@ -268,114 +245,16 @@ of type ``list[float]`` and that ``num`` must be of type ``float``: output.append(num) return output -Mypy will warn you if it is unable to determine the type of some variable -- -for example, when assigning an empty dictionary to some global value: - -.. code-block:: python - - my_global_dict = {} # Error: Need type annotation for "my_global_dict" - -You can teach mypy what type ``my_global_dict`` is meant to have by giving it -a type hint. For example, if you knew this variable is supposed to be a dict -of ints to floats, you could annotate it using either variable annotations -(introduced in Python 3.6 by :pep:`526`) or using a comment-based -syntax like so: - -.. code-block:: python - - # If you're using Python 3.9+ - my_global_dict: dict[int, float] = {} - - # If you're using Python 3.6+ - my_global_dict: Dict[int, float] = {} - - -Types and classes -***************** - -So far, we've only seen examples of pre-existing types like the ``int`` -or ``float`` builtins, or generic types from ``collections.abc`` and -``typing``, such as ``Iterable``. However, these aren't the only types you can -use: in fact, you can use any Python class as a type! - -For example, suppose you've defined a custom class representing a bank account: - -.. code-block:: python - - class BankAccount: - # Note: It is ok to omit type hints for the "self" parameter. - # Mypy will infer the correct type. 
- - def __init__(self, account_name: str, initial_balance: int = 0) -> None: - # Note: Mypy will infer the correct types of your fields - # based on the types of the parameters. - self.account_name = account_name - self.balance = initial_balance - - def deposit(self, amount: int) -> None: - self.balance += amount - - def withdraw(self, amount: int) -> None: - self.balance -= amount - - def overdrawn(self) -> bool: - return self.balance < 0 +For more details, see :ref:`type-inference-and-annotations`. -You can declare that a function will accept any instance of your class -by simply annotating the parameters with ``BankAccount``: - -.. code-block:: python - - def transfer(src: BankAccount, dst: BankAccount, amount: int) -> None: - src.withdraw(amount) - dst.deposit(amount) - - account_1 = BankAccount('Alice', 400) - account_2 = BankAccount('Bob', 200) - transfer(account_1, account_2, 50) - -In fact, the ``transfer`` function we wrote above can accept more then just -instances of ``BankAccount``: it can also accept any instance of a *subclass* -of ``BankAccount``. For example, suppose you write a new class that looks like this: - -.. code-block:: python - - class AuditedBankAccount(BankAccount): - def __init__(self, account_name: str, initial_balance: int = 0) -> None: - super().__init__(account_name, initial_balance) - self.audit_log: list[str] = [] - - def deposit(self, amount: int) -> None: - self.audit_log.append(f"Deposited {amount}") - self.balance += amount - - def withdraw(self, amount: int) -> None: - self.audit_log.append(f"Withdrew {amount}") - self.balance -= amount - -Since ``AuditedBankAccount`` is a subclass of ``BankAccount``, we can directly pass -in instances of it into our ``transfer`` function: - -.. code-block:: python - - audited = AuditedBankAccount('Charlie', 300) - transfer(account_1, audited, 100) # Type checks! - -This behavior is actually a fundamental aspect of the PEP 484 type system: when -we annotate some variable with a type ``T``, we are actually telling mypy that -variable can be assigned an instance of ``T``, or an instance of a *subclass* of ``T``. -The same rule applies to type hints on parameters or fields. - -See :ref:`class-basics` to learn more about how to work with code involving classes. - - -.. _stubs-intro: +Types from libraries +******************** -Stubs files and typeshed -************************ +Mypy can also understand how to work with types from libraries that you use. -Mypy also understands how to work with classes found in the standard library. -For example, here is a function which uses the ``Path`` object from the +For instance, mypy comes out of the box with an intimate knowledge of the +Python standard library. For example, here is a function which uses the +``Path`` object from the `pathlib standard library module `_: .. code-block:: python @@ -383,52 +262,43 @@ For example, here is a function which uses the ``Path`` object from the from pathlib import Path def load_template(template_path: Path, name: str) -> str: - # Mypy understands that 'file_path.read_text()' returns a str... + # Mypy knows that `file_path` has a `read_text` method that returns a str template = template_path.read_text() - - # ...so understands this line type checks. + # ...so it understands this line type checks return template.replace('USERNAME', name) -This behavior may surprise you if you're familiar with how -Python internally works. 
The standard library does not use type hints -anywhere, so how did mypy know that ``Path.read_text()`` returns a ``str``, -or that ``str.replace(...)`` accepts exactly two ``str`` arguments? +If a third party library you use :ref:`declares support for type checking `, +mypy will type check your use of that library based on the type hints +it contains. -The answer is that mypy comes bundled with *stub files* from the -the `typeshed `_ project, which -contains stub files for the Python builtins, the standard library, -and selected third-party packages. +However, if the third party library does not have type hints, mypy will +complain about missing type information. -A *stub file* is a file containing a skeleton of the public interface -of that Python module, including classes, variables, functions -- and -most importantly, their types. +.. code-block:: text -Mypy complains if it can't find a stub (or a real module) for a -library module that you import. Some modules ship with stubs or inline -annotations that mypy can automatically find, or you can install -additional stubs using pip (see :ref:`fix-missing-imports` and -:ref:`installed-packages` for the details). For example, you can install -the stubs for the ``requests`` package like this: + prog.py:1: error: Library stubs not installed for "yaml" + prog.py:1: note: Hint: "python3 -m pip install types-PyYAML" + prog.py:2: error: Library stubs not installed for "requests" + prog.py:2: note: Hint: "python3 -m pip install types-requests" + ... -.. code-block:: shell +In this case, you can provide mypy a different source of type information, +by installing a *stub* package. A stub package is a package that contains +type hints for another library, but no actual code. - $ python3 -m pip install types-requests +.. code-block:: shell -The stubs are usually packaged in a distribution named -``types-``. Note that the distribution name may be -different from the name of the package that you import. For example, -``types-PyYAML`` contains stubs for the ``yaml`` package. Mypy can -often suggest the name of the stub distribution: + $ python3 -m pip install types-PyYAML types-requests -.. code-block:: text +Stubs packages for a distribution are often named ``types-``. +Note that a distribution name may be different from the name of the package that +you import. For example, ``types-PyYAML`` contains stubs for the ``yaml`` +package. - prog.py:1: error: Library stubs not installed for "yaml" - prog.py:1: note: Hint: "python3 -m pip install types-PyYAML" - ... +For more discussion on strategies for handling errors about libraries without +type information, refer to :ref:`fix-missing-imports`. -You can also :ref:`create -stubs ` easily. We discuss strategies for handling errors -about missing stubs in :ref:`ignore-missing-imports`. +For more information about stubs, see :ref:`stub-files`. Next steps ********** @@ -463,5 +333,8 @@ resources: `mypy issue tracker `_ and typing `Gitter chat `_. +* For general questions about Python typing, try posting at + `typing discussions `_. + You can also continue reading this document and skip sections that aren't relevant for you. You don't need to read sections in order. diff --git a/docs/source/index.rst b/docs/source/index.rst index 27b3a078af6c..3546e1f4efa5 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -52,8 +52,8 @@ Contents :caption: First steps getting_started - existing_code cheat_sheet_py3 + existing_code .. 
_overview-type-system-reference: diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index 4a7b5dcf4093..ffc04e6ea14c 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -281,8 +281,9 @@ will continue to be of type ``Any``. line containing the import. 2. To suppress *all* missing import errors from a single library, add - a section to your :ref:`mypy config file ` for that library setting - :confval:`ignore_missing_imports` to True. For example, suppose your codebase + a per-module section to your :ref:`mypy config file ` setting + :confval:`ignore_missing_imports` to True for that library. For example, + suppose your codebase makes heavy use of an (untyped) library named ``foobar``. You can silence all import errors associated with that library and that library alone by adding the following section to your config file:: diff --git a/docs/source/stubs.rst b/docs/source/stubs.rst index af47a0e2afdd..7c84a9718b3e 100644 --- a/docs/source/stubs.rst +++ b/docs/source/stubs.rst @@ -3,12 +3,15 @@ Stub files ========== +A *stub file* is a file containing a skeleton of the public interface +of that Python module, including classes, variables, functions -- and +most importantly, their types. + Mypy uses stub files stored in the `typeshed `_ repository to determine the types of standard library and third-party library functions, classes, and other definitions. You can also create your own stubs that will be -used to type check your code. The basic properties of stubs were introduced -back in :ref:`stubs-intro`. +used to type check your code. Creating a stub *************** From 7a50c05a261e4827395eeb0faa444c72332d3bd9 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 1 Feb 2023 12:19:41 -0800 Subject: [PATCH 229/292] Fix passenv for tox 4 (#14578) Fixes #14522 --- tox.ini | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index a155ec726386..df7784f0731f 100644 --- a/tox.ini +++ b/tox.ini @@ -13,7 +13,11 @@ isolated_build = true [testenv] description = run the test driver with {basepython} -passenv = PYTEST_XDIST_WORKER_COUNT PROGRAMDATA PROGRAMFILES(X86) PYTEST_ADDOPTS +passenv = + PYTEST_XDIST_WORKER_COUNT + PROGRAMDATA + PROGRAMFILES(X86) + PYTEST_ADDOPTS deps = -rtest-requirements.txt commands = python -m pytest {posargs} From 168fc1e9993fe94e7cfa278c0169f93633418ca3 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 1 Feb 2023 14:03:57 -0800 Subject: [PATCH 230/292] Upgrade to tox v4 (#14579) --- .github/workflows/docs.yml | 6 +++--- .github/workflows/test.yml | 12 ++++++------ CONTRIBUTING.md | 6 +++--- docs/source/existing_code.rst | 2 +- tox.ini | 7 +++++-- 5 files changed, 18 insertions(+), 15 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 9f984e3a346b..a3294c08a79c 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,8 +26,8 @@ jobs: with: python-version: '3.7' - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==3.24.5 + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 - name: Setup tox environment - run: tox -e ${{ env.TOXENV }} --notest + run: tox run -e ${{ env.TOXENV }} --notest - name: Test - run: tox -e ${{ env.TOXENV }} --skip-pkg-install + run: tox run -e ${{ env.TOXENV }} --skip-pkg-install diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a02378cc01ab..e7072f5369c2 100644 --- 
a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -127,16 +127,16 @@ jobs: ./misc/build-debug-python.sh $PYTHONVERSION $PYTHONDIR $VENV source $VENV/bin/activate - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==3.24.5 + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 - name: Compiled with mypyc if: ${{ matrix.test_mypyc }} run: | pip install -r test-requirements.txt CC=clang MYPYC_OPT_LEVEL=0 MYPY_USE_MYPYC=1 pip install -e . - name: Setup tox environment - run: tox -e ${{ matrix.toxenv }} --notest + run: tox run -e ${{ matrix.toxenv }} --notest - name: Test - run: tox -e ${{ matrix.toxenv }} --skip-pkg-install -- ${{ matrix.tox_extra_args }} + run: tox run -e ${{ matrix.toxenv }} --skip-pkg-install -- ${{ matrix.tox_extra_args }} python-nightly: runs-on: ubuntu-latest @@ -147,11 +147,11 @@ jobs: with: python-version: '3.12-dev' - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==3.24.5 + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 - name: Setup tox environment - run: tox -e py --notest + run: tox run -e py --notest - name: Test - run: tox -e py --skip-pkg-install -- "-n 2" + run: tox run -e py --skip-pkg-install -- "-n 2" continue-on-error: true - name: Mark as a success run: exit 0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 193c9f27c85b..2b2e6cdb9734 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -53,13 +53,13 @@ python3 runtests.py You can also use `tox` to run tests (`tox` handles setting up the test environment for you): ``` -tox -e py +tox run -e py # Or some specific python version: -tox -e py39 +tox run -e py39 # Or some specific command: -tox -e lint +tox run -e lint ``` Some useful commands for running specific tests include: diff --git a/docs/source/existing_code.rst b/docs/source/existing_code.rst index 5b1fda40f2d6..410d7af0c350 100644 --- a/docs/source/existing_code.rst +++ b/docs/source/existing_code.rst @@ -51,7 +51,7 @@ A simple CI script could look something like this: python3 -m pip install mypy==0.971 # Run your standardised mypy invocation, e.g. mypy my_project - # This could also look like `scripts/run_mypy.sh`, `tox -e mypy`, `make mypy`, etc + # This could also look like `scripts/run_mypy.sh`, `tox run -e mypy`, `make mypy`, etc Ignoring errors from certain modules ------------------------------------ diff --git a/tox.ini b/tox.ini index df7784f0731f..443f05dc8bcf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -minversion = 3.8.0 +minversion = 4.4.4 skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True} envlist = py37, @@ -30,7 +30,10 @@ commands = [testenv:type] description = type check ourselves -passenv = TERM MYPY_FORCE_COLOR MYPY_FORCE_TERMINAL_WIDTH +passenv = + TERM + MYPY_FORCE_COLOR + MYPY_FORCE_TERMINAL_WIDTH commands = python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc python -m mypy --config-file mypy_self_check.ini misc --exclude misc/fix_annotate.py --exclude misc/async_matrix.py --exclude misc/sync-typeshed.py From 154fee110f274fe6214eff856b65d437ee299fdb Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 1 Feb 2023 23:28:15 +0000 Subject: [PATCH 231/292] Various documentation and error message tweaks (#14574) I looked at `git diff v0.991 master -- docs` and did some editing. There no major changes to content. Also updated one error message. 
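
The error message referred to above is the one behind the optional ``redundant-self``
error code; a minimal sketch of the new wording, assuming the check is enabled with
``--enable-error-code redundant-self`` (see the doc and test updates below):

.. code-block:: python

    from typing import Self

    class C:
        # error: Redundant "Self" annotation for the first method argument
        def copy(self: Self) -> Self:
            return type(self)()
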
--- docs/source/error_code_list2.rst | 42 +++++++++++++++--------- docs/source/generics.rst | 51 +++++++++++++++++------------- docs/source/index.rst | 2 +- mypy/semanal.py | 2 +- test-data/unit/check-selftype.test | 4 +-- 5 files changed, 60 insertions(+), 41 deletions(-) diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index 85ab76da5cee..f160515f0a9e 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -85,8 +85,11 @@ Example: Check that methods do not have redundant Self annotations [redundant-self] -------------------------------------------------------------------------- -Such annotations are allowed by :pep:`673` but are redundant, so if you want -warnings about them, enable this error code. +If a method uses the ``Self`` type in the return type or the type of a +non-self argument, there is no need to annotate the ``self`` argument +explicitly. Such annotations are allowed by :pep:`673` but are +redundant. If you enable this error code, mypy will generate an error if +there is a redundant ``Self`` type. Example: @@ -97,7 +100,7 @@ Example: from typing import Self class C: - # Error: Redundant Self annotation on method first argument + # Error: Redundant "Self" annotation for the first method argument def copy(self: Self) -> Self: return type(self)() @@ -236,29 +239,34 @@ mypy generates an error if it thinks that an expression is redundant. Check that expression is not implicitly true in boolean context [truthy-bool] ----------------------------------------------------------------------------- -Warn when an expression whose type does not implement ``__bool__`` or ``__len__`` is used in boolean context, -since unless implemented by a sub-type, the expression will always evaluate to true. +Warn when the type of an expression in a boolean context does not +implement ``__bool__`` or ``__len__``. Unless one of these is +implemented by a subtype, the expression will always be considered +true, and there may be a bug in the condition. + +As an exception, the ``object`` type is allowed in a boolean context. +Using an iterable value in a boolean context has a separate error code +(see below). .. code-block:: python # Use "mypy --enable-error-code truthy-bool ..." class Foo: - pass + pass foo = Foo() # Error: "foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context if foo: - ... - -The check is similar in concept to ensuring that an expression's type implements an expected interface (e.g. ``Sized``), -except that attempting to invoke an undefined method (e.g. ``__len__``) results in an error, -while attempting to evaluate an object in boolean context without a concrete implementation results in a truthy value. + ... Check that iterable is not implicitly true in boolean context [truthy-iterable] ------------------------------------------------------------------------------- -``Iterable`` does not implement ``__len__`` and so this code will be flagged: +Generate an error if a value of type ``Iterable`` is used as a boolean +condition, since ``Iterable`` does not implement ``__len__`` or ``__bool__``. + +Example: .. code-block:: python @@ -270,9 +278,13 @@ Check that iterable is not implicitly true in boolean context [truthy-iterable] return [42] return [x + 1 for x in items] -If called with a ``Generator`` like ``int(x) for x in []``, this function would not return ``[42]`` unlike -what the author might have intended. 
Of course it's possible that ``transform`` is only passed ``list`` objects, -and so there is no error in practice. In such case, it is recommended to annotate ``items: Collection[int]``. +If ``transform`` is called with a ``Generator`` argument, such as +``int(x) for x in []``, this function would not return ``[42]`` unlike +what might be intended. Of course, it's possible that ``transform`` is +only called with ``list`` or other container objects, and the ``if not +items`` check is actually valid. If that is the case, it is +recommended to annotate ``items`` as ``Collection[int]`` instead of +``Iterable[int]``. .. _ignore-without-code: diff --git a/docs/source/generics.rst b/docs/source/generics.rst index a867bc863c83..b8fefd27870f 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -262,10 +262,11 @@ Generic methods and generic self ******************************** You can also define generic methods — just use a type variable in the -method signature that is different from class type variables. In particular, -``self`` may also be generic, allowing a method to return the most precise -type known at the point of access. In this way, for example, you can typecheck -chaining of setter methods: +method signature that is different from class type variables. In +particular, the ``self`` argument may also be generic, allowing a +method to return the most precise type known at the point of access. +In this way, for example, you can type check a chain of setter +methods: .. code-block:: python @@ -291,7 +292,9 @@ chaining of setter methods: circle: Circle = Circle().set_scale(0.5).set_radius(2.7) square: Square = Square().set_scale(0.5).set_width(3.2) -Without using generic ``self``, the last two lines could not be type-checked properly. +Without using generic ``self``, the last two lines could not be type +checked properly, since the return type of ``set_scale`` would be +``Shape``, which doesn't define ``set_radius`` or ``set_width``. Other uses are factory methods, such as copy and deserialization. For class methods, you can also define generic ``cls``, using :py:class:`Type[T] `: @@ -324,16 +327,18 @@ In the latter case, you must implement this method in all future subclasses. Note also that mypy cannot always verify that the implementation of a copy or a deserialization method returns the actual type of self. Therefore you may need to silence mypy inside these methods (but not at the call site), -possibly by making use of the ``Any`` type. +possibly by making use of the ``Any`` type or a ``# type: ignore`` comment. -Note that this feature may accept some unsafe code for the purpose of -*practicality*. For example: +Note that mypy lets you use generic self types in certain unsafe ways +in order to support common idioms. For example, using a generic +self type in an argument type is accepted even though it's unsafe: .. code-block:: python from typing import TypeVar T = TypeVar("T") + class Base: def compare(self: T, other: T) -> bool: return False @@ -342,25 +347,27 @@ Note that this feature may accept some unsafe code for the purpose of def __init__(self, x: int) -> None: self.x = x - # This is unsafe (see below), but allowed because it is - # a common pattern, and rarely causes issues in practice. + # This is unsafe (see below) but allowed because it's + # a common pattern and rarely causes issues in practice. 
def compare(self, other: Sub) -> bool: return self.x > other.x b: Base = Sub(42) b.compare(Base()) # Runtime error here: 'Base' object has no attribute 'x' -For some advanced uses of self-types see :ref:`additional examples `. +For some advanced uses of self types, see :ref:`additional examples `. Automatic self types using typing.Self ************************************** -The patterns described above are quite common, so there is a syntactic sugar -for them introduced in :pep:`673`. Instead of defining a type variable and -using an explicit ``self`` annotation, you can import a magic type ``typing.Self`` -that is automatically transformed into a type variable with an upper bound of -current class, and you don't need an annotation for ``self`` (or ``cls`` for -class methods). The above example can thus be rewritten as: +Since the patterns described above are quite common, mypy supports a +simpler syntax, introduced in :pep:`673`, to make them easier to use. +Instead of defining a type variable and using an explicit annotation +for ``self``, you can import the special type ``typing.Self`` that is +automatically transformed into a type variable with the current class +as the upper bound, and you don't need an annotation for ``self`` (or +``cls`` in class methods). The example from the previous section can +be made simpler by using ``Self``: .. code-block:: python @@ -381,13 +388,13 @@ class methods). The above example can thus be rewritten as: a, b = SuperFriend.make_pair() -This is more compact than using explicit type variables, plus additionally -you can use ``Self`` in attribute annotations, not just in methods. +This is more compact than using explicit type variables. Also, you can +use ``Self`` in attribute annotations in addition to methods. .. note:: - To use this feature on versions of Python before 3.11, you will need to - import ``Self`` from ``typing_extensions`` version 4.0 or newer. + To use this feature on Python versions earlier than 3.11, you will need to + import ``Self`` from ``typing_extensions`` (version 4.0 or newer). .. _variance-of-generics: @@ -916,5 +923,5 @@ defeating the purpose of using aliases. Example: OIntVec = Optional[Vec[int]] -Using type variable bounds or values in generic aliases, has the same effect +Using type variable bounds or values in generic aliases has the same effect as in generic classes/functions. diff --git a/docs/source/index.rst b/docs/source/index.rst index 3546e1f4efa5..1c199dfc5ec2 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -39,7 +39,7 @@ understand, debug, and maintain. .. note:: - Although mypy is production ready, there will be occasional changes + Although mypy is production ready, there may be occasional changes that break backward compatibility. The mypy development team tries to minimize the impact of changes to user code. In case of a major breaking change, mypy's major version will be bumped. diff --git a/mypy/semanal.py b/mypy/semanal.py index 6a483edd7c72..31abc8c1a515 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -986,7 +986,7 @@ def prepare_method_signature(self, func: FuncDef, info: TypeInfo, has_self_type: # This error is off by default, since it is explicitly allowed # by the PEP 673. 
self.fail( - "Redundant Self annotation on method first argument", + 'Redundant "Self" annotation for the first method argument', func, code=codes.REDUNDANT_SELF_TYPE, ) diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 2d45d28764a0..555cef3641f8 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1627,13 +1627,13 @@ class C: from typing import Self, Type class C: - def copy(self: Self) -> Self: # E: Redundant Self annotation on method first argument + def copy(self: Self) -> Self: # E: Redundant "Self" annotation for the first method argument d: Defer class Defer: ... return self @classmethod - def g(cls: Type[Self]) -> Self: # E: Redundant Self annotation on method first argument + def g(cls: Type[Self]) -> Self: # E: Redundant "Self" annotation for the first method argument d: DeferAgain class DeferAgain: ... return cls() From ca2694f125e174fe84e45008cc63dea802643bc4 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 2 Feb 2023 05:39:08 -0800 Subject: [PATCH 232/292] Improve some dynamic typing docs (#14576) Linking #13681 --- docs/source/dynamic_typing.rst | 80 +++++++++++++++---- docs/source/getting_started.rst | 2 + docs/source/running_mypy.rst | 17 ++-- .../source/type_inference_and_annotations.rst | 42 +++++++++- 4 files changed, 119 insertions(+), 22 deletions(-) diff --git a/docs/source/dynamic_typing.rst b/docs/source/dynamic_typing.rst index 390bc52d9e2c..d3476de2ca64 100644 --- a/docs/source/dynamic_typing.rst +++ b/docs/source/dynamic_typing.rst @@ -4,27 +4,39 @@ Dynamically typed code ====================== -As mentioned earlier, bodies of functions that don't have any explicit -types in their function annotation are dynamically typed (operations -are checked at runtime). Code outside functions is statically typed by -default, and types of variables are inferred. This does usually the -right thing, but you can also make any variable dynamically typed by -defining it explicitly with the type ``Any``: +In :ref:`getting-started-dynamic-vs-static`, we discussed how bodies of functions +that don't have any explicit type annotations in their function are "dynamically typed" +and that mypy will not check them. In this section, we'll talk a little bit more +about what that means and how you can enable dynamic typing on a more fine grained basis. + +In cases where your code is too magical for mypy to understand, you can make a +variable or parameter dynamically typed by explicitly giving it the type +``Any``. Mypy will let you do basically anything with a value of type ``Any``, +including assigning a value of type ``Any`` to a variable of any type (or vice +versa). .. code-block:: python from typing import Any - s = 1 # Statically typed (type int) - d: Any = 1 # Dynamically typed (type Any) - s = 'x' # Type check error - d = 'x' # OK + num = 1 # Statically typed (inferred to be int) + num = 'x' # error: Incompatible types in assignment (expression has type "str", variable has type "int") + + dyn: Any = 1 # Dynamically typed (type Any) + dyn = 'x' # OK + + num = dyn # No error, mypy will let you assign a value of type Any to any variable + num += 1 # Oops, mypy still thinks num is an int + +You can think of ``Any`` as a way to locally disable type checking. +See :ref:`silencing-type-errors` for other ways you can shut up +the type checker. 
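
A minimal sketch of that "locally disable" idea, assuming a hypothetical
``load_config`` helper: uses of the ``Any``-typed parameter are not checked,
while the rest of the function still is:

.. code-block:: python

    from typing import Any

    def load_config(raw: Any) -> int:
        timeout = raw["timeout"] * 2  # no error: raw is Any, so anything goes
        return "oops"  # error: Incompatible return value type (got "str", expected "int")
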
Operations on Any values ------------------------ -You can do anything using a value with type ``Any``, and type checker -does not complain: +You can do anything using a value with type ``Any``, and the type checker +will not complain: .. code-block:: python @@ -37,7 +49,7 @@ does not complain: open(x).read() return x -Values derived from an ``Any`` value also often have the type ``Any`` +Values derived from an ``Any`` value also usually have the type ``Any`` implicitly, as mypy can't infer a more precise result type. For example, if you get the attribute of an ``Any`` value or call a ``Any`` value the result is ``Any``: @@ -45,12 +57,45 @@ example, if you get the attribute of an ``Any`` value or call a .. code-block:: python def f(x: Any) -> None: - y = x.foo() # y has type Any - y.bar() # Okay as well! + y = x.foo() + reveal_type(y) # Revealed type is "Any" + z = y.bar("mypy will let you do anything to y") + reveal_type(z) # Revealed type is "Any" ``Any`` types may propagate through your program, making type checking less effective, unless you are careful. +Function parameters without annotations are also implicitly ``Any``: + +.. code-block:: python + + def f(x) -> None: + reveal_type(x) # Revealed type is "Any" + x.can.do["anything", x]("wants", 2) + +You can make mypy warn you about untyped function parameters using the +:option:`--disallow-untyped-defs ` flag. + +Generic types missing type parameters will have those parameters implicitly +treated as ``Any``: + +.. code-block:: python + + from typing import List + + def f(x: List) -> None: + reveal_type(x) # Revealed type is "builtins.list[Any]" + reveal_type(x[0]) # Revealed type is "Any" + x[0].anything_goes() # OK + +You can make mypy warn you about untyped function parameters using the +:option:`--disallow-any-generics ` flag. + +Finally, another major source of ``Any`` types leaking into your program is from +third party libraries that mypy does not know about. This is particularly the case +when using the :option:`--ignore-missing-imports ` +flag. See :ref:`fix-missing-imports` for more information about this. + Any vs. object -------------- @@ -80,6 +125,11 @@ operations: n: int = 1 n = o # Error! + +If you're not sure whether you need to use :py:class:`object` or ``Any``, use +:py:class:`object` -- only switch to using ``Any`` if you get a type checker +complaint. + You can use different :ref:`type narrowing ` techniques to narrow :py:class:`object` to a more specific type (subtype) such as ``int``. Type narrowing is not needed with diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index bbe2d25d3b03..9b927097cfd2 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -44,6 +44,8 @@ easy to adopt mypy incrementally. In order to get useful diagnostics from mypy, you must add *type annotations* to your code. See the section below for details. +.. _getting-started-dynamic-vs-static: + Dynamic vs static typing ************************ diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index ffc04e6ea14c..c5222d9d5f47 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -228,6 +228,11 @@ attribute of the module will automatically succeed: # But this type checks, and x will have type 'Any' x = does_not_exist.foobar() +This can result in mypy failing to warn you about errors in your code. Since +operations on ``Any`` result in ``Any``, these dynamic types can propagate +through your code, making type checking less effective. 
See +:ref:`dynamic-typing` for more information. + The next sections describe what each of these errors means and recommended next steps; scroll to the section that matches your error. @@ -245,7 +250,7 @@ unless they either have declared themselves to be themselves on `typeshed `_, the repository of types for the standard library and some 3rd party libraries. -If you are getting this error, try: +If you are getting this error, try to obtain type hints for the library you're using: 1. Upgrading the version of the library you're using, in case a newer version has started to include type hints. @@ -264,7 +269,7 @@ If you are getting this error, try: adding the location to the ``MYPYPATH`` environment variable. These stub files do not need to be complete! A good strategy is to use - stubgen, a program that comes bundled with mypy, to generate a first + :ref:`stubgen `, a program that comes bundled with mypy, to generate a first rough draft of the stubs. You can then iterate on just the parts of the library you need. @@ -273,9 +278,11 @@ If you are getting this error, try: :ref:`PEP 561 compliant packages `. If you are unable to find any existing type hints nor have time to write your -own, you can instead *suppress* the errors. All this will do is make mypy stop -reporting an error on the line containing the import: the imported module -will continue to be of type ``Any``. +own, you can instead *suppress* the errors. + +All this will do is make mypy stop reporting an error on the line containing the +import: the imported module will continue to be of type ``Any``, and mypy may +not catch errors in its use. 1. To suppress a *single* missing import error, add a ``# type: ignore`` at the end of the line containing the import. diff --git a/docs/source/type_inference_and_annotations.rst b/docs/source/type_inference_and_annotations.rst index 5c58d56d85a1..6adb4e651224 100644 --- a/docs/source/type_inference_and_annotations.rst +++ b/docs/source/type_inference_and_annotations.rst @@ -185,6 +185,8 @@ Working around the issue is easy by adding a type annotation: a: list[int] = [] # OK foo(a) +.. _silencing-type-errors: + Silencing type errors ********************* @@ -228,6 +230,8 @@ short explanation of the bug. To do that, use this format: # Starting app on http://localhost:8000 app.run(8000) # type: ignore # `run()` in v2.0 accepts an `int`, as a port +Type ignore error codes +----------------------- By default, mypy displays an error code for each error: @@ -240,7 +244,21 @@ It is possible to add a specific error-code in your ignore comment (e.g. ``# type: ignore[attr-defined]``) to clarify what's being silenced. You can find more information about error codes :ref:`here `. -Similarly, you can also ignore all mypy errors in a file, by adding a +Other ways to silence errors +---------------------------- + +You can get mypy to silence errors about a specific variable by dynamically +typing it with ``Any``. See :ref:`dynamic-typing` for more information. + +.. code-block:: python + + from typing import Any + + def f(x: Any, y: str) -> None: + x = 'hello' + x += 1 # OK + +You can ignore all mypy errors in a file by adding a ``# mypy: ignore-errors`` at the top of the file: .. code-block:: python @@ -250,8 +268,28 @@ Similarly, you can also ignore all mypy errors in a file, by adding a import unittest ... +You can also specify per-module configuration options in your :ref:`config-file`. +For example: + +.. 
code-block:: ini + + # Don't report errors in the 'package_to_fix_later' package + [mypy-package_to_fix_later.*] + ignore_errors = True + + # Disable specific error codes in the 'tests' package + # Also don't require type annotations + [mypy-tests.*] + disable_error_code = var-annotated, has-type + allow_untyped_defs = True + + # Silence import errors from the 'library_missing_types' package + [mypy-library_missing_types.*] + ignore_missing_imports = True + Finally, adding a ``@typing.no_type_check`` decorator to a class, method or -function has the effect of ignoring that class, method or function. +function causes mypy to avoid type checking that class, method or function +and to treat it as not having any type annotations. .. code-block:: python From 3f139390547fc3876494402948ca472388c25737 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 2 Feb 2023 11:54:59 -0800 Subject: [PATCH 233/292] Make a top-level TypedDict page (#14584) This just moves content around (with minimal editing to make the moves make sense). TypedDict has been the target for several features, including some that are not yet documented. There was another PEP drafted today that was TypedDict themed. It's also pretty popular with users. Linking https://github.com/python/mypy/issues/13681 --- docs/source/index.rst | 1 + docs/source/more_types.rst | 254 +------------------------------------ docs/source/typed_dict.rst | 250 ++++++++++++++++++++++++++++++++++++ 3 files changed, 252 insertions(+), 253 deletions(-) create mode 100644 docs/source/typed_dict.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index 1c199dfc5ec2..7ab3edebad39 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -74,6 +74,7 @@ Contents generics more_types literal_types + typed_dict final_attrs metaclasses diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index 722909a038b5..ff5e8d384351 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -2,7 +2,7 @@ More types ========== This section introduces a few additional kinds of types, including :py:data:`~typing.NoReturn`, -:py:func:`NewType `, ``TypedDict``, and types for async code. It also discusses +:py:func:`NewType `, and types for async code. It also discusses how to give functions more precise types using overloads. All of these are only situationally useful, so feel free to skip this section and come back when you have a need for some of them. @@ -20,9 +20,6 @@ Here's a quick summary of what's covered here: signatures. This is useful if you need to encode a relationship between the arguments and the return type that would be difficult to express normally. -* ``TypedDict`` lets you give precise types for dictionaries that represent - objects with a fixed schema, such as ``{'id': 1, 'items': ['x']}``. - * Async types let you type check programs using ``async`` and ``await``. .. _noreturn: @@ -949,252 +946,3 @@ generator type as the return type: loop = asyncio.get_event_loop() loop.run_until_complete(countdown_2("USS Enterprise", 5)) loop.close() - - -.. _typeddict: - -TypedDict -********* - -Python programs often use dictionaries with string keys to represent objects. -Here is a typical example: - -.. code-block:: python - - movie = {'name': 'Blade Runner', 'year': 1982} - -Only a fixed set of string keys is expected (``'name'`` and -``'year'`` above), and each key has an independent value type (``str`` -for ``'name'`` and ``int`` for ``'year'`` above). 
We've previously -seen the ``dict[K, V]`` type, which lets you declare uniform -dictionary types, where every value has the same type, and arbitrary keys -are supported. This is clearly not a good fit for -``movie`` above. Instead, you can use a ``TypedDict`` to give a precise -type for objects like ``movie``, where the type of each -dictionary value depends on the key: - -.. code-block:: python - - from typing_extensions import TypedDict - - Movie = TypedDict('Movie', {'name': str, 'year': int}) - - movie: Movie = {'name': 'Blade Runner', 'year': 1982} - -``Movie`` is a ``TypedDict`` type with two items: ``'name'`` (with type ``str``) -and ``'year'`` (with type ``int``). Note that we used an explicit type -annotation for the ``movie`` variable. This type annotation is -important -- without it, mypy will try to infer a regular, uniform -:py:class:`dict` type for ``movie``, which is not what we want here. - -.. note:: - - If you pass a ``TypedDict`` object as an argument to a function, no - type annotation is usually necessary since mypy can infer the - desired type based on the declared argument type. Also, if an - assignment target has been previously defined, and it has a - ``TypedDict`` type, mypy will treat the assigned value as a ``TypedDict``, - not :py:class:`dict`. - -Now mypy will recognize these as valid: - -.. code-block:: python - - name = movie['name'] # Okay; type of name is str - year = movie['year'] # Okay; type of year is int - -Mypy will detect an invalid key as an error: - -.. code-block:: python - - director = movie['director'] # Error: 'director' is not a valid key - -Mypy will also reject a runtime-computed expression as a key, as -it can't verify that it's a valid key. You can only use string -literals as ``TypedDict`` keys. - -The ``TypedDict`` type object can also act as a constructor. It -returns a normal :py:class:`dict` object at runtime -- a ``TypedDict`` does -not define a new runtime type: - -.. code-block:: python - - toy_story = Movie(name='Toy Story', year=1995) - -This is equivalent to just constructing a dictionary directly using -``{ ... }`` or ``dict(key=value, ...)``. The constructor form is -sometimes convenient, since it can be used without a type annotation, -and it also makes the type of the object explicit. - -Like all types, ``TypedDict``\s can be used as components to build -arbitrarily complex types. For example, you can define nested -``TypedDict``\s and containers with ``TypedDict`` items. -Unlike most other types, mypy uses structural compatibility checking -(or structural subtyping) with ``TypedDict``\s. A ``TypedDict`` object with -extra items is compatible with (a subtype of) a narrower -``TypedDict``, assuming item types are compatible (*totality* also affects -subtyping, as discussed below). - -A ``TypedDict`` object is not a subtype of the regular ``dict[...]`` -type (and vice versa), since :py:class:`dict` allows arbitrary keys to be -added and removed, unlike ``TypedDict``. However, any ``TypedDict`` object is -a subtype of (that is, compatible with) ``Mapping[str, object]``, since -:py:class:`~typing.Mapping` only provides read-only access to the dictionary items: - -.. code-block:: python - - def print_typed_dict(obj: Mapping[str, object]) -> None: - for key, value in obj.items(): - print(f'{key}: {value}') - - print_typed_dict(Movie(name='Toy Story', year=1995)) # OK - -.. 
note:: - - Unless you are on Python 3.8 or newer (where ``TypedDict`` is available in - standard library :py:mod:`typing` module) you need to install ``typing_extensions`` - using pip to use ``TypedDict``: - - .. code-block:: text - - python3 -m pip install --upgrade typing-extensions - -Totality --------- - -By default mypy ensures that a ``TypedDict`` object has all the specified -keys. This will be flagged as an error: - -.. code-block:: python - - # Error: 'year' missing - toy_story: Movie = {'name': 'Toy Story'} - -Sometimes you want to allow keys to be left out when creating a -``TypedDict`` object. You can provide the ``total=False`` argument to -``TypedDict(...)`` to achieve this: - -.. code-block:: python - - GuiOptions = TypedDict( - 'GuiOptions', {'language': str, 'color': str}, total=False) - options: GuiOptions = {} # Okay - options['language'] = 'en' - -You may need to use :py:meth:`~dict.get` to access items of a partial (non-total) -``TypedDict``, since indexing using ``[]`` could fail at runtime. -However, mypy still lets use ``[]`` with a partial ``TypedDict`` -- you -just need to be careful with it, as it could result in a :py:exc:`KeyError`. -Requiring :py:meth:`~dict.get` everywhere would be too cumbersome. (Note that you -are free to use :py:meth:`~dict.get` with total ``TypedDict``\s as well.) - -Keys that aren't required are shown with a ``?`` in error messages: - -.. code-block:: python - - # Revealed type is "TypedDict('GuiOptions', {'language'?: builtins.str, - # 'color'?: builtins.str})" - reveal_type(options) - -Totality also affects structural compatibility. You can't use a partial -``TypedDict`` when a total one is expected. Also, a total ``TypedDict`` is not -valid when a partial one is expected. - -Supported operations --------------------- - -``TypedDict`` objects support a subset of dictionary operations and methods. -You must use string literals as keys when calling most of the methods, -as otherwise mypy won't be able to check that the key is valid. List -of supported operations: - -* Anything included in :py:class:`~typing.Mapping`: - - * ``d[key]`` - * ``key in d`` - * ``len(d)`` - * ``for key in d`` (iteration) - * :py:meth:`d.get(key[, default]) ` - * :py:meth:`d.keys() ` - * :py:meth:`d.values() ` - * :py:meth:`d.items() ` - -* :py:meth:`d.copy() ` -* :py:meth:`d.setdefault(key, default) ` -* :py:meth:`d1.update(d2) ` -* :py:meth:`d.pop(key[, default]) ` (partial ``TypedDict``\s only) -* ``del d[key]`` (partial ``TypedDict``\s only) - -.. note:: - - :py:meth:`~dict.clear` and :py:meth:`~dict.popitem` are not supported since they are unsafe - -- they could delete required ``TypedDict`` items that are not visible to - mypy because of structural subtyping. - -Class-based syntax ------------------- - -An alternative, class-based syntax to define a ``TypedDict`` is supported -in Python 3.6 and later: - -.. code-block:: python - - from typing_extensions import TypedDict - - class Movie(TypedDict): - name: str - year: int - -The above definition is equivalent to the original ``Movie`` -definition. It doesn't actually define a real class. This syntax also -supports a form of inheritance -- subclasses can define additional -items. However, this is primarily a notational shortcut. Since mypy -uses structural compatibility with ``TypedDict``\s, inheritance is not -required for compatibility. Here is an example of inheritance: - -.. 
code-block:: python - - class Movie(TypedDict): - name: str - year: int - - class BookBasedMovie(Movie): - based_on: str - -Now ``BookBasedMovie`` has keys ``name``, ``year`` and ``based_on``. - -Mixing required and non-required items --------------------------------------- - -In addition to allowing reuse across ``TypedDict`` types, inheritance also allows -you to mix required and non-required (using ``total=False``) items -in a single ``TypedDict``. Example: - -.. code-block:: python - - class MovieBase(TypedDict): - name: str - year: int - - class Movie(MovieBase, total=False): - based_on: str - -Now ``Movie`` has required keys ``name`` and ``year``, while ``based_on`` -can be left out when constructing an object. A ``TypedDict`` with a mix of required -and non-required keys, such as ``Movie`` above, will only be compatible with -another ``TypedDict`` if all required keys in the other ``TypedDict`` are required keys in the -first ``TypedDict``, and all non-required keys of the other ``TypedDict`` are also non-required keys -in the first ``TypedDict``. - -Unions of TypedDicts --------------------- - -Since TypedDicts are really just regular dicts at runtime, it is not possible to -use ``isinstance`` checks to distinguish between different variants of a Union of -TypedDict in the same way you can with regular objects. - -Instead, you can use the :ref:`tagged union pattern `. The referenced -section of the docs has a full description with an example, but in short, you will -need to give each TypedDict the same key where each value has a unique -:ref:`Literal type `. Then, check that key to distinguish -between your TypedDicts. diff --git a/docs/source/typed_dict.rst b/docs/source/typed_dict.rst new file mode 100644 index 000000000000..19a717d7feb7 --- /dev/null +++ b/docs/source/typed_dict.rst @@ -0,0 +1,250 @@ +.. _typeddict: + +TypedDict +********* + +Python programs often use dictionaries with string keys to represent objects. +``TypedDict`` lets you give precise types for dictionaries that represent +objects with a fixed schema, such as ``{'id': 1, 'items': ['x']}``. + +Here is a typical example: + +.. code-block:: python + + movie = {'name': 'Blade Runner', 'year': 1982} + +Only a fixed set of string keys is expected (``'name'`` and +``'year'`` above), and each key has an independent value type (``str`` +for ``'name'`` and ``int`` for ``'year'`` above). We've previously +seen the ``dict[K, V]`` type, which lets you declare uniform +dictionary types, where every value has the same type, and arbitrary keys +are supported. This is clearly not a good fit for +``movie`` above. Instead, you can use a ``TypedDict`` to give a precise +type for objects like ``movie``, where the type of each +dictionary value depends on the key: + +.. code-block:: python + + from typing_extensions import TypedDict + + Movie = TypedDict('Movie', {'name': str, 'year': int}) + + movie: Movie = {'name': 'Blade Runner', 'year': 1982} + +``Movie`` is a ``TypedDict`` type with two items: ``'name'`` (with type ``str``) +and ``'year'`` (with type ``int``). Note that we used an explicit type +annotation for the ``movie`` variable. This type annotation is +important -- without it, mypy will try to infer a regular, uniform +:py:class:`dict` type for ``movie``, which is not what we want here. + +.. note:: + + If you pass a ``TypedDict`` object as an argument to a function, no + type annotation is usually necessary since mypy can infer the + desired type based on the declared argument type. 
Also, if an + assignment target has been previously defined, and it has a + ``TypedDict`` type, mypy will treat the assigned value as a ``TypedDict``, + not :py:class:`dict`. + +Now mypy will recognize these as valid: + +.. code-block:: python + + name = movie['name'] # Okay; type of name is str + year = movie['year'] # Okay; type of year is int + +Mypy will detect an invalid key as an error: + +.. code-block:: python + + director = movie['director'] # Error: 'director' is not a valid key + +Mypy will also reject a runtime-computed expression as a key, as +it can't verify that it's a valid key. You can only use string +literals as ``TypedDict`` keys. + +The ``TypedDict`` type object can also act as a constructor. It +returns a normal :py:class:`dict` object at runtime -- a ``TypedDict`` does +not define a new runtime type: + +.. code-block:: python + + toy_story = Movie(name='Toy Story', year=1995) + +This is equivalent to just constructing a dictionary directly using +``{ ... }`` or ``dict(key=value, ...)``. The constructor form is +sometimes convenient, since it can be used without a type annotation, +and it also makes the type of the object explicit. + +Like all types, ``TypedDict``\s can be used as components to build +arbitrarily complex types. For example, you can define nested +``TypedDict``\s and containers with ``TypedDict`` items. +Unlike most other types, mypy uses structural compatibility checking +(or structural subtyping) with ``TypedDict``\s. A ``TypedDict`` object with +extra items is compatible with (a subtype of) a narrower +``TypedDict``, assuming item types are compatible (*totality* also affects +subtyping, as discussed below). + +A ``TypedDict`` object is not a subtype of the regular ``dict[...]`` +type (and vice versa), since :py:class:`dict` allows arbitrary keys to be +added and removed, unlike ``TypedDict``. However, any ``TypedDict`` object is +a subtype of (that is, compatible with) ``Mapping[str, object]``, since +:py:class:`~typing.Mapping` only provides read-only access to the dictionary items: + +.. code-block:: python + + def print_typed_dict(obj: Mapping[str, object]) -> None: + for key, value in obj.items(): + print(f'{key}: {value}') + + print_typed_dict(Movie(name='Toy Story', year=1995)) # OK + +.. note:: + + Unless you are on Python 3.8 or newer (where ``TypedDict`` is available in + standard library :py:mod:`typing` module) you need to install ``typing_extensions`` + using pip to use ``TypedDict``: + + .. code-block:: text + + python3 -m pip install --upgrade typing-extensions + +Totality +-------- + +By default mypy ensures that a ``TypedDict`` object has all the specified +keys. This will be flagged as an error: + +.. code-block:: python + + # Error: 'year' missing + toy_story: Movie = {'name': 'Toy Story'} + +Sometimes you want to allow keys to be left out when creating a +``TypedDict`` object. You can provide the ``total=False`` argument to +``TypedDict(...)`` to achieve this: + +.. code-block:: python + + GuiOptions = TypedDict( + 'GuiOptions', {'language': str, 'color': str}, total=False) + options: GuiOptions = {} # Okay + options['language'] = 'en' + +You may need to use :py:meth:`~dict.get` to access items of a partial (non-total) +``TypedDict``, since indexing using ``[]`` could fail at runtime. +However, mypy still lets use ``[]`` with a partial ``TypedDict`` -- you +just need to be careful with it, as it could result in a :py:exc:`KeyError`. +Requiring :py:meth:`~dict.get` everywhere would be too cumbersome. 
(Note that you +are free to use :py:meth:`~dict.get` with total ``TypedDict``\s as well.) + +Keys that aren't required are shown with a ``?`` in error messages: + +.. code-block:: python + + # Revealed type is "TypedDict('GuiOptions', {'language'?: builtins.str, + # 'color'?: builtins.str})" + reveal_type(options) + +Totality also affects structural compatibility. You can't use a partial +``TypedDict`` when a total one is expected. Also, a total ``TypedDict`` is not +valid when a partial one is expected. + +Supported operations +-------------------- + +``TypedDict`` objects support a subset of dictionary operations and methods. +You must use string literals as keys when calling most of the methods, +as otherwise mypy won't be able to check that the key is valid. List +of supported operations: + +* Anything included in :py:class:`~typing.Mapping`: + + * ``d[key]`` + * ``key in d`` + * ``len(d)`` + * ``for key in d`` (iteration) + * :py:meth:`d.get(key[, default]) ` + * :py:meth:`d.keys() ` + * :py:meth:`d.values() ` + * :py:meth:`d.items() ` + +* :py:meth:`d.copy() ` +* :py:meth:`d.setdefault(key, default) ` +* :py:meth:`d1.update(d2) ` +* :py:meth:`d.pop(key[, default]) ` (partial ``TypedDict``\s only) +* ``del d[key]`` (partial ``TypedDict``\s only) + +.. note:: + + :py:meth:`~dict.clear` and :py:meth:`~dict.popitem` are not supported since they are unsafe + -- they could delete required ``TypedDict`` items that are not visible to + mypy because of structural subtyping. + +Class-based syntax +------------------ + +An alternative, class-based syntax to define a ``TypedDict`` is supported +in Python 3.6 and later: + +.. code-block:: python + + from typing_extensions import TypedDict + + class Movie(TypedDict): + name: str + year: int + +The above definition is equivalent to the original ``Movie`` +definition. It doesn't actually define a real class. This syntax also +supports a form of inheritance -- subclasses can define additional +items. However, this is primarily a notational shortcut. Since mypy +uses structural compatibility with ``TypedDict``\s, inheritance is not +required for compatibility. Here is an example of inheritance: + +.. code-block:: python + + class Movie(TypedDict): + name: str + year: int + + class BookBasedMovie(Movie): + based_on: str + +Now ``BookBasedMovie`` has keys ``name``, ``year`` and ``based_on``. + +Mixing required and non-required items +-------------------------------------- + +In addition to allowing reuse across ``TypedDict`` types, inheritance also allows +you to mix required and non-required (using ``total=False``) items +in a single ``TypedDict``. Example: + +.. code-block:: python + + class MovieBase(TypedDict): + name: str + year: int + + class Movie(MovieBase, total=False): + based_on: str + +Now ``Movie`` has required keys ``name`` and ``year``, while ``based_on`` +can be left out when constructing an object. A ``TypedDict`` with a mix of required +and non-required keys, such as ``Movie`` above, will only be compatible with +another ``TypedDict`` if all required keys in the other ``TypedDict`` are required keys in the +first ``TypedDict``, and all non-required keys of the other ``TypedDict`` are also non-required keys +in the first ``TypedDict``. + +Unions of TypedDicts +-------------------- + +Since TypedDicts are really just regular dicts at runtime, it is not possible to +use ``isinstance`` checks to distinguish between different variants of a Union of +TypedDict in the same way you can with regular objects. 
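+
+For example, a check like the following is rejected by mypy and also fails at
+runtime, because both variants are plain dictionaries once the program is
+running. (This is only an illustrative sketch: ``Movie`` is the type defined
+earlier on this page, while ``Book`` and ``describe`` are hypothetical names
+invented for the example.)
+
+.. code-block:: python
+
+    from typing import Union
+
+    class Book(TypedDict):
+        title: str
+        pages: int
+
+    def describe(item: Union[Movie, Book]) -> str:
+        # Rejected by mypy, and raises TypeError at runtime:
+        # TypedDict types do not support isinstance() checks.
+        if isinstance(item, Movie):
+            return item['name']
+        return item['title']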
+ +Instead, you can use the :ref:`tagged union pattern `. The referenced +section of the docs has a full description with an example, but in short, you will +need to give each TypedDict the same key where each value has a unique +:ref:`Literal type `. Then, check that key to distinguish +between your TypedDicts. From 2adc98f521620a3390fd8402852df85af6a05829 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 2 Feb 2023 12:08:21 -0800 Subject: [PATCH 234/292] Improve the Common Issues page (#14581) Linking #13681 --- docs/source/common_issues.rst | 149 ++++++++++++++++------------------ docs/source/running_mypy.rst | 8 ++ 2 files changed, 80 insertions(+), 77 deletions(-) diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index 465035307d5d..afb8e7d3ffe1 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -9,15 +9,6 @@ doesn't work as expected. Statically typed code is often identical to normal Python code (except for type annotations), but sometimes you need to do things slightly differently. -Can't install mypy using pip ----------------------------- - -If installation fails, you've probably hit one of these issues: - -* Mypy needs Python 3.6 or later to run. -* You may have to run pip like this: - ``python3 -m pip install mypy``. - .. _annotations_needed: No errors reported for obviously wrong code @@ -26,7 +17,9 @@ No errors reported for obviously wrong code There are several common reasons why obviously wrong code is not flagged as an error. -**The function containing the error is not annotated.** Functions that +**The function containing the error is not annotated.** + +Functions that do not have any annotations (neither for any argument nor for the return type) are not type-checked, and even the most blatant type errors (e.g. ``2 + 'a'``) pass silently. The solution is to add @@ -52,7 +45,9 @@ once you add annotations: If you don't know what types to add, you can use ``Any``, but beware: -**One of the values involved has type 'Any'.** Extending the above +**One of the values involved has type 'Any'.** + +Extending the above example, if we were to leave out the annotation for ``a``, we'd get no error: @@ -68,49 +63,52 @@ The reason is that if the type of ``a`` is unknown, the type of If you're having trouble debugging such situations, :ref:`reveal_type() ` might come in handy. -Note that sometimes library stubs have imprecise type information, -e.g. the :py:func:`pow` builtin returns ``Any`` (see `typeshed issue 285 -`_ for the reason). +Note that sometimes library stubs with imprecise type information +can be a source of ``Any`` values. :py:meth:`__init__ ` **method has no annotated -arguments or return type annotation.** :py:meth:`__init__ ` -is considered fully-annotated **if at least one argument is annotated**, -while mypy will infer the return type as ``None``. -The implication is that, for a :py:meth:`__init__ ` method -that has no argument, you'll have to explicitly annotate the return type -as ``None`` to type-check this :py:meth:`__init__ ` method: +arguments and no return type annotation.** + +This is basically a combination of the two cases above, in that ``__init__`` +without annotations can cause ``Any`` types leak into instance variables: .. code-block:: python - def foo(s: str) -> str: - return s + class Bad: + def __init__(self): + self.value = "asdf" + 1 + "asdf" # No error! + + bad = Bad() + bad.value + 1 # No error! 
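+    # Both "No error!" lines above pass silently: Bad.__init__ has no
+    # annotations, so its body goes unchecked and self.value ends up as Any.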
+ reveal_type(bad) # Revealed type is "__main__.Bad" + reveal_type(bad.value) # Revealed type is "Any" - class A(): - def __init__(self, value: str): # Return type inferred as None, considered as typed method + class Good: + def __init__(self) -> None: # Explicitly return None self.value = value - foo(1) # error: Argument 1 to "foo" has incompatible type "int"; expected "str" - - class B(): - def __init__(self): # No argument is annotated, considered as untyped method - foo(1) # No error! - - class C(): - def __init__(self) -> None: # Must specify return type to type-check - foo(1) # error: Argument 1 to "foo" has incompatible type "int"; expected "str" - -**Some imports may be silently ignored**. Another source of -unexpected ``Any`` values are the :option:`--ignore-missing-imports -` and :option:`--follow-imports=skip -` flags. When you use :option:`--ignore-missing-imports `, -any imported module that cannot be found is silently replaced with -``Any``. When using :option:`--follow-imports=skip ` the same is true for -modules for which a ``.py`` file is found but that are not specified -on the command line. (If a ``.pyi`` stub is found it is always -processed normally, regardless of the value of -:option:`--follow-imports `.) To help debug the former situation (no -module found at all) leave out :option:`--ignore-missing-imports `; to get -clarity about the latter use :option:`--follow-imports=error `. You can -read up about these and other useful flags in :ref:`command-line`. + + +**Some imports may be silently ignored**. + +A common source of unexpected ``Any`` values is the +:option:`--ignore-missing-imports ` flag. + +When you use :option:`--ignore-missing-imports `, +any imported module that cannot be found is silently replaced with ``Any``. + +To help debug this, simply leave out +:option:`--ignore-missing-imports `. +As mentioned in :ref:`fix-missing-imports`, setting ``ignore_missing_imports=True`` +on a per-module basis will make bad surprises less likely and is highly encouraged. + +Use of the :option:`--follow-imports=skip ` flags can also +cause problems. Use of these flags is strongly discouraged and only required in +relatively niche situations. See :ref:`follow-imports` for more information. + +**mypy considers some of your code unreachable**. + +See :ref:`unreachable` for more information. **A function annotated as returning a non-optional type returns 'None' and mypy doesn't complain**. @@ -186,25 +184,17 @@ over ``.py`` files. Ignoring a whole file --------------------- -A ``# type: ignore`` comment at the top of a module (before any statements, +* To only ignore errors, use a top-level ``# mypy: ignore-errors`` comment instead. +* To only ignore errors with a specific error code, use a top-level + ``# mypy: disable-error-code=...`` comment. +* To replace the contents of a module with ``Any``, use a per-module ``follow_imports = skip``. + See :ref:`Following imports ` for details. + +Note that a ``# type: ignore`` comment at the top of a module (before any statements, including imports or docstrings) has the effect of ignoring the entire contents of the module. This behaviour can be surprising and result in "Module ... has no attribute ... [attr-defined]" errors. -To only ignore errors, use a top-level ``# mypy: ignore-errors`` comment instead. -To only ignore errors with a specific error code, use a top-level -``# mypy: disable-error-code=...`` comment. -To replace the contents of the module with ``Any``, use a per-module ``follow_imports = skip``. 
-See :ref:`Following imports ` for details. - -.. code-block:: python - - # type: ignore - - import foo - - foo.bar() - Issues with code at runtime --------------------------- @@ -262,20 +252,20 @@ Redefinitions with incompatible types Each name within a function only has a single 'declared' type. You can reuse for loop indices etc., but if you want to use a variable with -multiple types within a single function, you may need to declare it -with the ``Any`` type. +multiple types within a single function, you may need to instead use +multiple variables (or maybe declare the variable with an ``Any`` type). .. code-block:: python def f() -> None: n = 1 ... - n = 'x' # Type error: n has type int + n = 'x' # error: Incompatible types in assignment (expression has type "str", variable has type "int") .. note:: - This limitation could be lifted in a future mypy - release. + Using the :option:`--allow-redefinition ` + flag can suppress this error in several cases. Note that you can redefine a variable with a more *precise* or a more concrete type. For example, you can redefine a sequence (which does @@ -289,6 +279,8 @@ not support ``sort()``) as a list and sort it in-place: # Type of x is List[int] here. x.sort() # Okay! +See :ref:`type-narrowing` for more information. + .. _variance: Invariance vs covariance @@ -340,24 +332,24 @@ Declaring a supertype as variable type Sometimes the inferred type is a subtype (subclass) of the desired type. The type inference uses the first assignment to infer the type -of a name (assume here that ``Shape`` is the base class of both -``Circle`` and ``Triangle``): +of a name: .. code-block:: python - shape = Circle() # Infer shape to be Circle - ... - shape = Triangle() # Type error: Triangle is not a Circle + class Shape: ... + class Circle(Shape): ... + class Triangle(Shape): ... + + shape = Circle() # mypy infers the type of shape to be Circle + shape = Triangle() # error: Incompatible types in assignment (expression has type "Triangle", variable has type "Circle") You can just give an explicit type for the variable in cases such the above example: .. code-block:: python - shape = Circle() # type: Shape # The variable s can be any Shape, - # not just Circle - ... - shape = Triangle() # OK + shape: Shape = Circle() # The variable s can be any Shape, not just Circle + shape = Triangle() # OK Complex type tests ------------------ @@ -622,7 +614,10 @@ You can install the latest development version of mypy from source. Clone the git clone https://github.com/python/mypy.git cd mypy - sudo python3 -m pip install --upgrade . + python3 -m pip install --upgrade . + +To install a development version of mypy that is mypyc-compiled, see the +instructions at the `mypyc wheels repo `_. Variables vs type aliases ------------------------- diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index c5222d9d5f47..b0cefec9dafa 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -357,6 +357,9 @@ other than the one mypy is running in, you can use :option:`--python-executable ` flag to point to the Python executable for that environment, and mypy will find packages installed for that Python executable. +If you've installed the relevant stub packages and are still getting this error, +see the :ref:`section below `. + .. _missing-type-hints-for-third-party-library: Cannot find implementation or library stub @@ -379,6 +382,11 @@ this error, try: line flag to point the Python interpreter containing your installed third party packages. 
+ You can confirm that you are running mypy from the environment you expect + by running it like ``python -m mypy ...``. You can confirm that you are + installing into the environment you expect by running pip like + ``python -m pip ...``. + 2. Reading the :ref:`finding-imports` section below to make sure you understand how exactly mypy searches for and finds modules and modify how you're invoking mypy accordingly. From 7bde0d6daf51332eb9a8bbdcc22cadcc9750f354 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 2 Feb 2023 12:40:28 -0800 Subject: [PATCH 235/292] Improve protocols documentation (#14577) Linking #13681 --- docs/source/protocols.rst | 89 +++++++++++++++++++++++++++++++++------ 1 file changed, 76 insertions(+), 13 deletions(-) diff --git a/docs/source/protocols.rst b/docs/source/protocols.rst index 603c9fd0dcc8..cb51809a66d5 100644 --- a/docs/source/protocols.rst +++ b/docs/source/protocols.rst @@ -4,14 +4,17 @@ Protocols and structural subtyping ================================== Mypy supports two ways of deciding whether two classes are compatible -as types: nominal subtyping and structural subtyping. *Nominal* -subtyping is strictly based on the class hierarchy. If class ``D`` +as types: nominal subtyping and structural subtyping. + +*Nominal* subtyping is strictly based on the class hierarchy. If class ``D`` inherits class ``C``, it's also a subtype of ``C``, and instances of ``D`` can be used when ``C`` instances are expected. This form of subtyping is used by default in mypy, since it's easy to understand and produces clear and concise error messages, and since it matches how the native :py:func:`isinstance ` check works -- based on class -hierarchy. *Structural* subtyping can also be useful. Class ``D`` is +hierarchy. + +*Structural* subtyping is based on the operations that can be performed with an object. Class ``D`` is a structural subtype of class ``C`` if the former has all attributes and methods of the latter, and with compatible types. @@ -72,15 +75,16 @@ class: from typing_extensions import Protocol class SupportsClose(Protocol): - def close(self) -> None: - ... # Empty method body (explicit '...') + # Empty method body (explicit '...') + def close(self) -> None: ... class Resource: # No SupportsClose base class! - # ... some methods ... def close(self) -> None: self.resource.release() + # ... other methods ... + def close_all(items: Iterable[SupportsClose]) -> None: for item in items: item.close() @@ -146,7 +150,9 @@ present if you are defining a protocol: You can also include default implementations of methods in protocols. If you explicitly subclass these protocols you can inherit -these default implementations. Explicitly including a protocol as a +these default implementations. + +Explicitly including a protocol as a base class is also a way of documenting that your class implements a particular protocol, and it forces mypy to verify that your class implementation is actually compatible with the protocol. In particular, @@ -157,12 +163,62 @@ abstract: class SomeProto(Protocol): attr: int # Note, no right hand side - def method(self) -> str: ... # Literal ... here + def method(self) -> str: ... # Literally just ... 
here + class ExplicitSubclass(SomeProto): pass + ExplicitSubclass() # error: Cannot instantiate abstract class 'ExplicitSubclass' # with abstract attributes 'attr' and 'method' +Invariance of protocol attributes +********************************* + +A common issue with protocols is that protocol attributes are invariant. +For example: + +.. code-block:: python + + class Box(Protocol): + content: object + + class IntBox: + content: int + + def takes_box(box: Box) -> None: ... + + takes_box(IntBox()) # error: Argument 1 to "takes_box" has incompatible type "IntBox"; expected "Box" + # note: Following member(s) of "IntBox" have conflicts: + # note: content: expected "object", got "int" + +This is because ``Box`` defines ``content`` as a mutable attribute. +Here's why this is problematic: + +.. code-block:: python + + def takes_box_evil(box: Box) -> None: + box.content = "asdf" # This is bad, since box.content is supposed to be an object + + my_int_box = IntBox() + takes_box_evil(my_int_box) + my_int_box.content + 1 # Oops, TypeError! + +This can be fixed by declaring ``content`` to be read-only in the ``Box`` +protocol using ``@property``: + +.. code-block:: python + + class Box(Protocol): + @property + def content(self) -> object: ... + + class IntBox: + content: int + + def takes_box(box: Box) -> None: ... + + takes_box(IntBox(42)) # OK + Recursive protocols ******************* @@ -197,7 +253,7 @@ Using isinstance() with protocols You can use a protocol class with :py:func:`isinstance` if you decorate it with the ``@runtime_checkable`` class decorator. The decorator adds -support for basic runtime structural checks: +rudimentary support for runtime structural checks: .. code-block:: python @@ -214,16 +270,23 @@ support for basic runtime structural checks: def use(handles: int) -> None: ... mug = Mug() - if isinstance(mug, Portable): - use(mug.handles) # Works statically and at runtime + if isinstance(mug, Portable): # Works at runtime! + use(mug.handles) :py:func:`isinstance` also works with the :ref:`predefined protocols ` in :py:mod:`typing` such as :py:class:`~typing.Iterable`. -.. note:: +.. warning:: :py:func:`isinstance` with protocols is not completely safe at runtime. For example, signatures of methods are not checked. The runtime - implementation only checks that all protocol members are defined. + implementation only checks that all protocol members exist, + not that they have the correct type. :py:func:`issubclass` with protocols + will only check for the existence of methods. + +.. note:: + :py:func:`isinstance` with protocols can also be surprisingly slow. + In many cases, you're better served by using :py:func:`hasattr` to + check for the presence of attributes. .. 
_callback_protocols: From e2e0fbe22bd37e2c3fa796ef7bf4743d3451e4d5 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 2 Feb 2023 12:43:02 -0800 Subject: [PATCH 236/292] Improve Generics docs page (#14587) Linking https://github.com/python/mypy/issues/13681 --- docs/source/generics.rst | 474 ++++++++++++++++++++++----------------- 1 file changed, 262 insertions(+), 212 deletions(-) diff --git a/docs/source/generics.rst b/docs/source/generics.rst index b8fefd27870f..9ac79f90121d 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -50,17 +50,9 @@ Using ``Stack`` is similar to built-in container types: stack = Stack[int]() stack.push(2) stack.pop() - stack.push('x') # Type error + stack.push('x') # error: Argument 1 to "push" of "Stack" has incompatible type "str"; expected "int" -Type inference works for user-defined generic types as well: - -.. code-block:: python - - def process(stack: Stack[int]) -> None: ... - - process(Stack()) # Argument has inferred type Stack[int] - -Construction of instances of generic types is also type checked: +Construction of instances of generic types is type checked: .. code-block:: python @@ -68,77 +60,17 @@ Construction of instances of generic types is also type checked: def __init__(self, content: T) -> None: self.content = content - Box(1) # OK, inferred type is Box[int] + Box(1) # OK, inferred type is Box[int] Box[int](1) # Also OK - s = 'some string' - Box[int](s) # Type error - -Generic class internals -*********************** - -You may wonder what happens at runtime when you index -``Stack``. Indexing ``Stack`` returns a *generic alias* -to ``Stack`` that returns instances of the original class on -instantiation: - -.. code-block:: python - - >>> print(Stack) - __main__.Stack - >>> print(Stack[int]) - __main__.Stack[int] - >>> print(Stack[int]().__class__) - __main__.Stack - -Generic aliases can be instantiated or subclassed, similar to real -classes, but the above examples illustrate that type variables are -erased at runtime. Generic ``Stack`` instances are just ordinary -Python objects, and they have no extra runtime overhead or magic due -to being generic, other than a metaclass that overloads the indexing -operator. - -Note that in Python 3.8 and lower, the built-in types -:py:class:`list`, :py:class:`dict` and others do not support indexing. -This is why we have the aliases :py:class:`~typing.List`, -:py:class:`~typing.Dict` and so on in the :py:mod:`typing` -module. Indexing these aliases gives you a generic alias that -resembles generic aliases constructed by directly indexing the target -class in more recent versions of Python: - -.. code-block:: python - - >>> # Only relevant for Python 3.8 and below - >>> # For Python 3.9 onwards, prefer `list[int]` syntax - >>> from typing import List - >>> List[int] - typing.List[int] - -Note that the generic aliases in ``typing`` don't support constructing -instances: - -.. code-block:: python - - >>> from typing import List - >>> List[int]() - Traceback (most recent call last): - ... - TypeError: Type List cannot be instantiated; use list() instead - -.. note:: - - In Python 3.6 indexing generic types or type aliases results in actual - type objects. This means that generic types in type annotations can - have a significant runtime cost. This was changed in Python 3.7, and - indexing generic types became a cheap operation. + Box[int]('some string') # error: Argument 1 to "Box" has incompatible type "str"; expected "int" .. 
_generic-subclasses: -Defining sub-classes of generic classes -*************************************** +Defining subclasses of generic classes +************************************** User-defined generic classes and generic classes defined in :py:mod:`typing` -can be used as base classes for another classes, both generic and -non-generic. For example: +can be used as a base class for another class (generic or non-generic). For example: .. code-block:: python @@ -147,29 +79,29 @@ non-generic. For example: KT = TypeVar('KT') VT = TypeVar('VT') - class MyMap(Mapping[KT, VT]): # This is a generic subclass of Mapping - def __getitem__(self, k: KT) -> VT: - ... # Implementations omitted - def __iter__(self) -> Iterator[KT]: - ... - def __len__(self) -> int: - ... + # This is a generic subclass of Mapping + class MyMap(Mapping[KT, VT]): + def __getitem__(self, k: KT) -> VT: ... + def __iter__(self) -> Iterator[KT]: ... + def __len__(self) -> int: ... - items: MyMap[str, int] # Okay + items: MyMap[str, int] # OK - class StrDict(dict[str, str]): # This is a non-generic subclass of dict + # This is a non-generic subclass of dict + class StrDict(dict[str, str]): def __str__(self) -> str: return f'StrDict({super().__str__()})' + data: StrDict[int, int] # Error! StrDict is not generic data2: StrDict # OK + # This is a user-defined generic class class Receiver(Generic[T]): - def accept(self, value: T) -> None: - ... + def accept(self, value: T) -> None: ... - class AdvancedReceiver(Receiver[T]): - ... + # This is a generic subclass of Receiver + class AdvancedReceiver(Receiver[T]): ... .. note:: @@ -215,15 +147,16 @@ For example: Generic functions ***************** -Generic type variables can also be used to define generic functions: +Type variables can be used to define generic functions: .. code-block:: python from typing import TypeVar, Sequence - T = TypeVar('T') # Declare type variable + T = TypeVar('T') - def first(seq: Sequence[T]) -> T: # Generic function + # A generic function! + def first(seq: Sequence[T]) -> T: return seq[0] As with generic classes, the type variable can be replaced with any @@ -232,10 +165,8 @@ return type is derived from the sequence item type. For example: .. code-block:: python - # Assume first defined as above. - - s = first('foo') # s has type str. - n = first([1, 2, 3]) # n has type int. + reveal_type(first([1, 2, 3])) # Revealed type is "builtins.int" + reveal_type(first(['a', 'b'])) # Revealed type is "builtins.str" Note also that a single definition of a type variable (such as ``T`` above) can be used in multiple generic functions or classes. In this @@ -406,51 +337,84 @@ relations between them: invariant, covariant, and contravariant. Assuming that we have a pair of types ``A`` and ``B``, and ``B`` is a subtype of ``A``, these are defined as follows: -* A generic class ``MyCovGen[T, ...]`` is called covariant in type variable - ``T`` if ``MyCovGen[B, ...]`` is always a subtype of ``MyCovGen[A, ...]``. -* A generic class ``MyContraGen[T, ...]`` is called contravariant in type - variable ``T`` if ``MyContraGen[A, ...]`` is always a subtype of - ``MyContraGen[B, ...]``. -* A generic class ``MyInvGen[T, ...]`` is called invariant in ``T`` if neither +* A generic class ``MyCovGen[T]`` is called covariant in type variable + ``T`` if ``MyCovGen[B]`` is always a subtype of ``MyCovGen[A]``. +* A generic class ``MyContraGen[T]`` is called contravariant in type + variable ``T`` if ``MyContraGen[A]`` is always a subtype of + ``MyContraGen[B]``. 
+* A generic class ``MyInvGen[T]`` is called invariant in ``T`` if neither of the above is true. Let us illustrate this by few simple examples: -* :py:data:`~typing.Union` is covariant in all variables: ``Union[Cat, int]`` is a subtype - of ``Union[Animal, int]``, - ``Union[Dog, int]`` is also a subtype of ``Union[Animal, int]``, etc. - Most immutable containers such as :py:class:`~typing.Sequence` and :py:class:`~typing.FrozenSet` are also - covariant. -* :py:data:`~typing.Callable` is an example of type that behaves contravariant in types of - arguments, namely ``Callable[[Employee], int]`` is a subtype of - ``Callable[[Manager], int]``. To understand this, consider a function: +.. code-block:: python + + # We'll use these classes in the examples below + class Shape: ... + class Triangle(Shape): ... + class Square(Shape): ... + +* Most immutable containers, such as :py:class:`~typing.Sequence` and + :py:class:`~typing.FrozenSet` are covariant. :py:data:`~typing.Union` is + also covariant in all variables: ``Union[Triangle, int]`` is + a subtype of ``Union[Shape, int]``. .. code-block:: python - def salaries(staff: list[Manager], - accountant: Callable[[Manager], int]) -> list[int]: ... + def count_lines(shapes: Sequence[Shape]) -> int: + return sum(shape.num_sides for shape in shapes) - This function needs a callable that can calculate a salary for managers, and - if we give it a callable that can calculate a salary for an arbitrary - employee, it's still safe. -* :py:class:`~typing.List` is an invariant generic type. Naively, one would think - that it is covariant, but let us consider this code: + triangles: Sequence[Triangle] + count_lines(triangles) # OK + + def foo(triangle: Triangle, num: int): + shape_or_number: Union[Shape, int] + # a Triangle is a Shape, and a Shape is a valid Union[Shape, int] + shape_or_number = triangle + + Covariance should feel relatively intuitive, but contravariance and invariance + can be harder to reason about. + +* :py:data:`~typing.Callable` is an example of type that behaves contravariant + in types of arguments. That is, ``Callable[[Shape], int]`` is a subtype of + ``Callable[[Triangle], int]``, despite ``Shape`` being a supertype of + ``Triangle``. To understand this, consider: .. code-block:: python - class Shape: - pass + def cost_of_paint_required( + triangle: Triangle, + area_calculator: Callable[[Triangle], float] + ) -> float: + return area_calculator(triangle) * DOLLAR_PER_SQ_FT + + # This straightforwardly works + def area_of_triangle(triangle: Triangle) -> float: ... + cost_of_paint_required(triangle, area_of_triangle) # OK + + # But this works as well! + def area_of_any_shape(shape: Shape) -> float: ... + cost_of_paint_required(triangle, area_of_any_shape) # OK + + ``cost_of_paint_required`` needs a callable that can calculate the area of a + triangle. If we give it a callable that can calculate the area of an + arbitrary shape (not just triangles), everything still works. + +* :py:class:`~typing.List` is an invariant generic type. Naively, one would think + that it is covariant, like :py:class:`~typing.Sequence` above, but consider this code: + + .. code-block:: python class Circle(Shape): - def rotate(self): - ... + # The rotate method is only defined on Circle, not on Shape + def rotate(self): ... def add_one(things: list[Shape]) -> None: things.append(Shape()) - my_things: list[Circle] = [] - add_one(my_things) # This may appear safe, but... 
- my_things[0].rotate() # ...this will fail + my_circles: list[Circle] = [] + add_one(my_circles) # This may appear safe, but... + my_circles[-1].rotate() # ...this will fail, since my_circles[0] is now a Shape, not a Circle Another example of invariant type is :py:class:`~typing.Dict`. Most mutable containers are invariant. @@ -478,6 +442,45 @@ type variables defined with special keyword arguments ``covariant`` or my_box = Box(Cat()) look_into(my_box) # OK, but mypy would complain here for an invariant type +.. _type-variable-upper-bound: + +Type variables with upper bounds +******************************** + +A type variable can also be restricted to having values that are +subtypes of a specific type. This type is called the upper bound of +the type variable, and is specified with the ``bound=...`` keyword +argument to :py:class:`~typing.TypeVar`. + +.. code-block:: python + + from typing import TypeVar, SupportsAbs + + T = TypeVar('T', bound=SupportsAbs[float]) + +In the definition of a generic function that uses such a type variable +``T``, the type represented by ``T`` is assumed to be a subtype of +its upper bound, so the function can use methods of the upper bound on +values of type ``T``. + +.. code-block:: python + + def largest_in_absolute_value(*xs: T) -> T: + return max(xs, key=abs) # Okay, because T is a subtype of SupportsAbs[float]. + +In a call to such a function, the type ``T`` must be replaced by a +type that is a subtype of its upper bound. Continuing the example +above: + +.. code-block:: python + + largest_in_absolute_value(-3.5, 2) # Okay, has type float. + largest_in_absolute_value(5+6j, 7) # Okay, has type complex. + largest_in_absolute_value('a', 'b') # Error: 'str' is not a subtype of SupportsAbs[float]. + +Type parameters of generic classes may also have upper bounds, which +restrict the valid values for the type parameter in the same way. + .. _type-variable-value-restriction: Type variables with value restriction @@ -512,7 +515,7 @@ argument types: concat(b'a', b'b') # Okay concat(1, 2) # Error! -Note that this is different from a union type, since combinations +Importantly, this is different from a union type, since combinations of ``str`` and ``bytes`` are not accepted: .. code-block:: python @@ -520,8 +523,8 @@ of ``str`` and ``bytes`` are not accepted: concat('string', b'bytes') # Error! In this case, this is exactly what we want, since it's not possible -to concatenate a string and a bytes object! The type checker -will reject this function: +to concatenate a string and a bytes object! If we tried to use +``Union``, the type checker would complain about this possibility: .. code-block:: python @@ -536,10 +539,13 @@ subtype of ``str``: class S(str): pass ss = concat(S('foo'), S('bar')) + reveal_type(ss) # Revealed type is "builtins.str" You may expect that the type of ``ss`` is ``S``, but the type is actually ``str``: a subtype gets promoted to one of the valid values -for the type variable, which in this case is ``str``. This is thus +for the type variable, which in this case is ``str``. + +This is thus subtly different from *bounded quantification* in languages such as Java, where the return type would be ``S``. The way mypy implements this is correct for ``concat``, since ``concat`` actually returns a @@ -555,66 +561,25 @@ values when defining a generic class. For example, mypy uses the type :py:class:`Pattern[AnyStr] ` for the return value of :py:func:`re.compile`, since regular expressions can be based on a string or a bytes pattern. -.. 
_type-variable-upper-bound: - -Type variables with upper bounds -******************************** - -A type variable can also be restricted to having values that are -subtypes of a specific type. This type is called the upper bound of -the type variable, and is specified with the ``bound=...`` keyword -argument to :py:class:`~typing.TypeVar`. - -.. code-block:: python - - from typing import TypeVar, SupportsAbs - - T = TypeVar('T', bound=SupportsAbs[float]) - -In the definition of a generic function that uses such a type variable -``T``, the type represented by ``T`` is assumed to be a subtype of -its upper bound, so the function can use methods of the upper bound on -values of type ``T``. - -.. code-block:: python - - def largest_in_absolute_value(*xs: T) -> T: - return max(xs, key=abs) # Okay, because T is a subtype of SupportsAbs[float]. - -In a call to such a function, the type ``T`` must be replaced by a -type that is a subtype of its upper bound. Continuing the example -above, - -.. code-block:: python - - largest_in_absolute_value(-3.5, 2) # Okay, has type float. - largest_in_absolute_value(5+6j, 7) # Okay, has type complex. - largest_in_absolute_value('a', 'b') # Error: 'str' is not a subtype of SupportsAbs[float]. - -Type parameters of generic classes may also have upper bounds, which -restrict the valid values for the type parameter in the same way. - A type variable may not have both a value restriction (see -:ref:`type-variable-value-restriction`) and an upper bound. +:ref:`type-variable-upper-bound`) and an upper bound. .. _declaring-decorators: Declaring decorators ******************** -One common application of type variables along with parameter specifications -is in declaring a decorator that preserves the signature of the function it decorates. - -Note that class decorators are handled differently than function decorators in -mypy: decorating a class does not erase its type, even if the decorator has -incomplete type annotations. +Decorators are typically functions that take a function as an argument and +return another function. Describing this behaviour in terms of types can +be a little tricky; we'll show how you can use ``TypeVar`` and a special +kind of type variable called a *parameter specification* to do so. Suppose we have the following decorator, not type annotated yet, that preserves the original function's signature and merely prints the decorated function's name: .. code-block:: python - def my_decorator(func): + def printing_decorator(func): def wrapper(*args, **kwds): print("Calling", func) return func(*args, **kwds) @@ -625,20 +590,28 @@ and we use it to decorate function ``add_forty_two``: .. code-block:: python # A decorated function. - @my_decorator + @printing_decorator def add_forty_two(value: int) -> int: return value + 42 a = add_forty_two(3) -Since ``my_decorator`` is not type-annotated, the following won't get type-checked: +Since ``printing_decorator`` is not type-annotated, the following won't get type checked: .. code-block:: python - reveal_type(a) # revealed type: Any - add_forty_two('foo') # no type-checker error :( + reveal_type(a) # Revealed type is "Any" + add_forty_two('foo') # No type checker error :( + +This is a sorry state of affairs! 
If you run with ``--strict``, mypy will +even alert you to this fact: +``Untyped decorator makes function "add_forty_two" untyped`` + +Note that class decorators are handled differently than function decorators in +mypy: decorating a class does not erase its type, even if the decorator has +incomplete type annotations. -Before parameter specifications, here's how one might have annotated the decorator: +Here's how one could annotate the decorator: .. code-block:: python @@ -647,50 +620,58 @@ Before parameter specifications, here's how one might have annotated the decorat F = TypeVar('F', bound=Callable[..., Any]) # A decorator that preserves the signature. - def my_decorator(func: F) -> F: + def printing_decorator(func: F) -> F: def wrapper(*args, **kwds): print("Calling", func) return func(*args, **kwds) return cast(F, wrapper) -and that would enable the following type checks: - -.. code-block:: python + @printing_decorator + def add_forty_two(value: int) -> int: + return value + 42 - reveal_type(a) # Revealed type is "builtins.int" + a = add_forty_two(3) + reveal_type(a) # Revealed type is "builtins.int" add_forty_two('x') # Argument 1 to "add_forty_two" has incompatible type "str"; expected "int" +This still has some shortcomings. First, we need to use the unsafe +:py:func:`~typing.cast` to convince mypy that ``wrapper()`` has the same +signature as ``func``. See :ref:`casts `. -Note that the ``wrapper()`` function is not type-checked. Wrapper -functions are typically small enough that this is not a big +Second, the ``wrapper()`` function is not tightly type checked, although +wrapper functions are typically small enough that this is not a big problem. This is also the reason for the :py:func:`~typing.cast` call in the -``return`` statement in ``my_decorator()``. See :ref:`casts `. However, -with the introduction of parameter specifications in mypy 0.940, we can now -have a more faithful type annotation: +``return`` statement in ``printing_decorator()``. + +However, we can use a parameter specification (:py:class:`~typing.ParamSpec`), +for a more faithful type annotation: .. code-block:: python - from typing import Callable, ParamSpec, TypeVar + from typing import Callable, TypeVar + from typing_extensions import ParamSpec P = ParamSpec('P') T = TypeVar('T') - def my_decorator(func: Callable[P, T]) -> Callable[P, T]: + def printing_decorator(func: Callable[P, T]) -> Callable[P, T]: def wrapper(*args: P.args, **kwds: P.kwargs) -> T: print("Calling", func) return func(*args, **kwds) return wrapper -When the decorator alters the signature, parameter specifications truly show their potential: +Parameter specifications also allow you to describe decorators that +alter the signature of the input function: .. 
code-block:: python - from typing import Callable, ParamSpec, TypeVar + from typing import Callable, TypeVar + from typing_extensions import ParamSpec P = ParamSpec('P') T = TypeVar('T') - # Note: We reuse 'P' in the return type, but replace 'T' with 'str' + # We reuse 'P' in the return type, but replace 'T' with 'str' def stringify(func: Callable[P, T]) -> Callable[P, str]: def wrapper(*args: P.args, **kwds: P.kwargs) -> str: return str(func(*args, **kwds)) @@ -701,9 +682,30 @@ When the decorator alters the signature, parameter specifications truly show the return value + 42 a = add_forty_two(3) - reveal_type(a) # str - foo('x') # Type check error: incompatible type "str"; expected "int" + reveal_type(a) # Revealed type is "builtins.str" + add_forty_two('x') # error: Argument 1 to "add_forty_two" has incompatible type "str"; expected "int" +Or insert an argument: + +.. code-block:: python + + from typing import Callable, TypeVar + from typing_extensions import Concatenate, ParamSpec + + P = ParamSpec('P') + T = TypeVar('T') + + def printing_decorator(func: Callable[P, T]) -> Callable[Concatenate[str, P], T]: + def wrapper(msg: str, /, *args: P.args, **kwds: P.kwargs) -> T: + print("Calling", func, "with", msg) + return func(*args, **kwds) + return wrapper + + @printing_decorator + def add_forty_two(value: int) -> int: + return value + 42 + + a = add_forty_two('three', 3) .. _decorator-factories: @@ -793,9 +795,8 @@ protocols mostly follow the normal rules for generic classes. Example: y: Box[int] = ... x = y # Error -- Box is invariant -Per :pep:`PEP 544: Generic protocols <544#generic-protocols>`, ``class -ClassName(Protocol[T])`` is allowed as a shorthand for ``class -ClassName(Protocol, Generic[T])``. +Note that ``class ClassName(Protocol[T])`` is allowed as a shorthand for +``class ClassName(Protocol, Generic[T])``, as per :pep:`PEP 544: Generic protocols <544#generic-protocols>`, The main difference between generic protocols and ordinary generic classes is that mypy checks that the declared variances of generic @@ -806,20 +807,18 @@ variable is invariant: .. code-block:: python - from typing import TypeVar - from typing_extensions import Protocol + from typing import Protocol, TypeVar T = TypeVar('T') - class ReadOnlyBox(Protocol[T]): # Error: covariant type variable expected + class ReadOnlyBox(Protocol[T]): # error: Invariant type variable "T" used in protocol where covariant one is expected def content(self) -> T: ... This example correctly uses a covariant type variable: .. code-block:: python - from typing import TypeVar - from typing_extensions import Protocol + from typing import Protocol, TypeVar T_co = TypeVar('T_co', covariant=True) @@ -844,16 +843,12 @@ Generic protocols can also be recursive. Example: class L: val: int + def next(self) -> 'L': ... - ... # details omitted - - def next(self) -> 'L': - ... # details omitted - - def last(seq: Linked[T]) -> T: - ... # implementation omitted + def last(seq: Linked[T]) -> T: ... - result = last(L()) # Inferred type of 'result' is 'int' + result = last(L()) + reveal_type(result) # Revealed type is "builtins.int" .. _generic-type-aliases: @@ -925,3 +920,58 @@ defeating the purpose of using aliases. Example: Using type variable bounds or values in generic aliases has the same effect as in generic classes/functions. + + +Generic class internals +*********************** + +You may wonder what happens at runtime when you index a generic class. 
+Indexing returns a *generic alias* to the original class that returns instances +of the original class on instantiation: + +.. code-block:: python + + >>> from typing import TypeVar, Generic + >>> T = TypeVar('T') + >>> class Stack(Generic[T]): ... + >>> Stack + __main__.Stack + >>> Stack[int] + __main__.Stack[int] + >>> instance = Stack[int]() + >>> instance.__class__ + __main__.Stack + +Generic aliases can be instantiated or subclassed, similar to real +classes, but the above examples illustrate that type variables are +erased at runtime. Generic ``Stack`` instances are just ordinary +Python objects, and they have no extra runtime overhead or magic due +to being generic, other than a metaclass that overloads the indexing +operator. + +Note that in Python 3.8 and lower, the built-in types +:py:class:`list`, :py:class:`dict` and others do not support indexing. +This is why we have the aliases :py:class:`~typing.List`, +:py:class:`~typing.Dict` and so on in the :py:mod:`typing` +module. Indexing these aliases gives you a generic alias that +resembles generic aliases constructed by directly indexing the target +class in more recent versions of Python: + +.. code-block:: python + + >>> # Only relevant for Python 3.8 and below + >>> # For Python 3.9 onwards, prefer `list[int]` syntax + >>> from typing import List + >>> List[int] + typing.List[int] + +Note that the generic aliases in ``typing`` don't support constructing +instances: + +.. code-block:: python + + >>> from typing import List + >>> List[int]() + Traceback (most recent call last): + ... + TypeError: Type List cannot be instantiated; use list() instead From 0929773801ee2d7a1eca5c64ae9575f43e661a6f Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 2 Feb 2023 14:29:50 -0800 Subject: [PATCH 237/292] Suggest importing from typing_extensions (#14591) --- mypy/semanal.py | 10 ++++++++++ test-data/unit/check-statements.test | 14 ++++++++++++++ test-data/unit/lib-stub/typing_extensions.pyi | 2 ++ 3 files changed, 26 insertions(+) diff --git a/mypy/semanal.py b/mypy/semanal.py index 31abc8c1a515..1256133cb5f3 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2597,6 +2597,16 @@ def report_missing_module_attribute( ): # Yes. Generate a helpful note. self.msg.add_fixture_note(fullname, context) + else: + typing_extensions = self.modules.get("typing_extensions") + if typing_extensions and source_id in typing_extensions.names: + self.msg.note( + f"Use `from typing_extensions import {source_id}` instead", context + ) + self.msg.note( + "See https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module", + context, + ) def process_import_over_existing_name( self, diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index ed7349aaa296..b9551870ddfc 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -2198,3 +2198,17 @@ def foo(x: int) -> Union[Generator[A, None, None], Generator[B, None, None]]: [case testNoCrashOnStarRightHandSide] x = *(1, 2, 3) # E: Can use starred expression only as assignment target [builtins fixtures/tuple.pyi] + + +[case testTypingExtensionsSuggestion] +from typing import _FutureFeatureFixture + +# This import is only needed in tests. In real life, mypy will always have typing_extensions in its +# build due to its pervasive use in typeshed. 
This assumption may one day prove False, but when +# that day comes this suggestion will also be less helpful than it is today. +import typing_extensions +[out] +main:1: error: Module "typing" has no attribute "_FutureFeatureFixture" +main:1: note: Use `from typing_extensions import _FutureFeatureFixture` instead +main:1: note: See https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi index 89f7108fe83c..b03fc7e6df14 100644 --- a/test-data/unit/lib-stub/typing_extensions.pyi +++ b/test-data/unit/lib-stub/typing_extensions.pyi @@ -59,3 +59,5 @@ def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) - def reveal_type(__obj: T) -> T: pass def dataclass_transform() -> Callable[[T], T]: ... + +_FutureFeatureFixture = 0 From 50717cfa9c34beaf5eb6c756f0add1a53d35bb9e Mon Sep 17 00:00:00 2001 From: Rodrigo Silva Date: Thu, 2 Feb 2023 19:37:39 -0300 Subject: [PATCH 238/292] Improve misleading message in Enum() (#5317) (#14590) Clarifying this is a mypy limitation, not an Enum() requirement Not a true fix, but a workaround for #5317 to avoid confusion (as seen by the many duplicates) --- mypy/semanal_enum.py | 4 ++-- test-data/unit/check-enum.test | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mypy/semanal_enum.py b/mypy/semanal_enum.py index d48e620b89f1..c7b8e44f65aa 100644 --- a/mypy/semanal_enum.py +++ b/mypy/semanal_enum.py @@ -220,14 +220,14 @@ def parse_enum_call_args( items.append(field) else: return self.fail_enum_call_arg( - "%s() expects a string, tuple, list or dict literal as the second argument" + "Second argument of %s() must be string, tuple, list or dict literal for mypy to determine Enum members" % class_name, call, ) else: # TODO: Allow dict(x=1, y=2) as a substitute for {'x': 1, 'y': 2}? 
return self.fail_enum_call_arg( - "%s() expects a string, tuple, list or dict literal as the second argument" + "Second argument of %s() must be string, tuple, list or dict literal for mypy to determine Enum members" % class_name, call, ) diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 9343e8d5c562..80a7ca7ff99f 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -482,13 +482,13 @@ W.c [typing fixtures/typing-medium.pyi] [out] main:2: error: Too few arguments for Enum() -main:3: error: Enum() expects a string, tuple, list or dict literal as the second argument +main:3: error: Second argument of Enum() must be string, tuple, list or dict literal for mypy to determine Enum members main:4: error: Too many arguments for Enum() -main:5: error: Enum() expects a string, tuple, list or dict literal as the second argument +main:5: error: Second argument of Enum() must be string, tuple, list or dict literal for mypy to determine Enum members main:5: error: Name "foo" is not defined -main:7: error: Enum() expects a string, tuple, list or dict literal as the second argument +main:7: error: Second argument of Enum() must be string, tuple, list or dict literal for mypy to determine Enum members main:8: error: Too few arguments for IntEnum() -main:9: error: IntEnum() expects a string, tuple, list or dict literal as the second argument +main:9: error: Second argument of IntEnum() must be string, tuple, list or dict literal for mypy to determine Enum members main:10: error: Too many arguments for IntEnum() main:11: error: Enum() needs at least one item main:12: error: Enum() needs at least one item From f8765d4abc974285cedc0d45f94efbf19f9d9f39 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 2 Feb 2023 22:42:23 -0800 Subject: [PATCH 239/292] mypy_primer: use target base branch for finding merge base (#14595) --- .github/workflows/mypy_primer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml index 9eef1c1c7466..e7e4af1f07b7 100644 --- a/.github/workflows/mypy_primer.yml +++ b/.github/workflows/mypy_primer.yml @@ -48,7 +48,7 @@ jobs: echo "new commit" git rev-list --format=%s --max-count=1 $GITHUB_SHA - MERGE_BASE=$(git merge-base $GITHUB_SHA origin/master) + MERGE_BASE=$(git merge-base $GITHUB_SHA origin/$GITHUB_BASE_REF) git checkout -b base_commit $MERGE_BASE echo "base commit" git rev-list --format=%s --max-count=1 base_commit From fcf539877edd403c76af77ac68decf9120538faa Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Fri, 3 Feb 2023 02:16:09 -0800 Subject: [PATCH 240/292] Bump version to 1.1.0+dev (#14593) The 1.0 release branch has been cut! --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index b125385f9b43..258a0e4f8bcb 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". 
-__version__ = "1.0.0+dev" +__version__ = "1.1.0+dev" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From 523c381b447c7b29f45b56e4c5ba84c495955869 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Fri, 3 Feb 2023 02:47:00 -0800 Subject: [PATCH 241/292] [used before def] add documentation (#14592) Adding documentation for used-before-def check. --- docs/source/error_code_list.rst | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 674ad08c4d09..11d01c884b33 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -89,6 +89,23 @@ This example accidentally calls ``sort()`` instead of :py:func:`sorted`: x = sort([3, 2, 4]) # Error: Name "sort" is not defined [name-defined] + +Check that a variable is not used before it's defined [used-before-def] +----------------------------------------------------------------------- + +Mypy will generate an error if a name is used before it's defined. +While the name-defined check will catch issues with names that are undefined, +it will not flag if a variable is used and then defined later in the scope. +used-before-def check will catch such cases. + +Example: + +.. code-block:: python + + print(x) # Error: Name "x" is used before definition [used-before-def] + x = 123 + + Check arguments in calls [call-arg] ----------------------------------- @@ -430,7 +447,7 @@ Example: # Error: Incompatible types (expression has type "float", # TypedDict item "x" has type "int") [typeddict-item] p: Point = {'x': 1.2, 'y': 4} - + Check TypedDict Keys [typeddict-unknown-key] -------------------------------------------- From 11739e48df5e81ae5b7cbe3639f47fa2339213f6 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 3 Feb 2023 14:46:51 +0000 Subject: [PATCH 242/292] Stubtest: handle name-mangling edge cases better (#14596) --- mypy/stubtest.py | 2 +- mypy/test/teststubtest.py | 43 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 774f03cbbdd0..4a99c407f319 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -493,7 +493,7 @@ def verify_typeinfo( for entry in sorted(to_check): mangled_entry = entry if entry.startswith("__") and not entry.endswith("__"): - mangled_entry = f"_{stub.name}{entry}" + mangled_entry = f"_{stub.name.lstrip('_')}{entry}" stub_to_verify = next((t.names[entry].node for t in stub.mro if entry in t.names), MISSING) assert stub_to_verify is not None try: diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 5e59d8efec63..42dd40d76414 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -1154,6 +1154,49 @@ def __mangle_bad(self, text): pass """, error="X.__mangle_bad", ) + yield Case( + stub=""" + class Klass: + class __Mangled1: + class __Mangled2: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class Klass: + class __Mangled1: + class __Mangled2: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="Klass.__Mangled1.__Mangled2.__mangle_bad", + ) + yield Case( + stub=""" + class __Dunder__: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... 
+ """, + runtime=""" + class __Dunder__: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="__Dunder__.__mangle_bad", + ) + yield Case( + stub=""" + class _Private: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class _Private: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="_Private.__mangle_bad", + ) @collect_cases def test_mro(self) -> Iterator[Case]: From 59955ee5c67e9500f70eaa963daa0fdce6b0c24d Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Fri, 3 Feb 2023 19:25:03 +0000 Subject: [PATCH 243/292] Sync typeshed Source commit: https://github.com/python/typeshed/commit/37a180ef7b54a666832f56dfb20ff68b188d2b9c --- mypy/typeshed/LICENSE | 1 - mypy/typeshed/stdlib/_bisect.pyi | 40 +- mypy/typeshed/stdlib/_bootlocale.pyi | 2 +- mypy/typeshed/stdlib/_codecs.pyi | 100 +- mypy/typeshed/stdlib/_collections_abc.pyi | 2 +- mypy/typeshed/stdlib/_compression.pyi | 4 +- mypy/typeshed/stdlib/_curses.pyi | 30 +- mypy/typeshed/stdlib/_decimal.pyi | 80 +- mypy/typeshed/stdlib/_dummy_thread.pyi | 6 +- mypy/typeshed/stdlib/_dummy_threading.pyi | 40 +- mypy/typeshed/stdlib/_imp.pyi | 6 +- mypy/typeshed/stdlib/_markupbase.pyi | 4 +- mypy/typeshed/stdlib/_msi.pyi | 4 +- mypy/typeshed/stdlib/_operator.pyi | 2 +- mypy/typeshed/stdlib/_osx_support.pyi | 4 +- mypy/typeshed/stdlib/_random.pyi | 2 +- mypy/typeshed/stdlib/_sitebuiltins.pyi | 2 +- mypy/typeshed/stdlib/_tkinter.pyi | 38 +- mypy/typeshed/stdlib/_tracemalloc.pyi | 2 +- mypy/typeshed/stdlib/_warnings.pyi | 4 +- mypy/typeshed/stdlib/_weakref.pyi | 4 +- mypy/typeshed/stdlib/_weakrefset.pyi | 2 +- mypy/typeshed/stdlib/_winapi.pyi | 12 +- mypy/typeshed/stdlib/abc.pyi | 2 +- mypy/typeshed/stdlib/aifc.pyi | 4 +- mypy/typeshed/stdlib/argparse.pyi | 163 ++- mypy/typeshed/stdlib/array.pyi | 6 +- mypy/typeshed/stdlib/ast.pyi | 62 +- mypy/typeshed/stdlib/asynchat.pyi | 2 +- mypy/typeshed/stdlib/asyncio/base_events.pyi | 312 ++-- .../stdlib/asyncio/base_subprocess.pyi | 5 +- mypy/typeshed/stdlib/asyncio/events.pyi | 332 ++--- .../stdlib/asyncio/format_helpers.pyi | 4 +- mypy/typeshed/stdlib/asyncio/futures.pyi | 6 +- mypy/typeshed/stdlib/asyncio/locks.pyi | 28 +- .../stdlib/asyncio/proactor_events.pyi | 27 +- mypy/typeshed/stdlib/asyncio/queues.pyi | 6 +- mypy/typeshed/stdlib/asyncio/runners.pyi | 12 +- .../stdlib/asyncio/selector_events.pyi | 2 +- mypy/typeshed/stdlib/asyncio/sslproto.pyi | 40 +- mypy/typeshed/stdlib/asyncio/staggered.pyi | 2 +- mypy/typeshed/stdlib/asyncio/streams.pyi | 56 +- mypy/typeshed/stdlib/asyncio/subprocess.pyi | 54 +- mypy/typeshed/stdlib/asyncio/taskgroups.pyi | 2 +- mypy/typeshed/stdlib/asyncio/tasks.pyi | 90 +- mypy/typeshed/stdlib/asyncio/transports.pyi | 10 +- mypy/typeshed/stdlib/asyncio/unix_events.pyi | 1 - .../stdlib/asyncio/windows_events.pyi | 14 +- .../typeshed/stdlib/asyncio/windows_utils.pyi | 8 +- mypy/typeshed/stdlib/asyncore.pyi | 23 +- mypy/typeshed/stdlib/audioop.pyi | 4 +- mypy/typeshed/stdlib/base64.pyi | 18 +- mypy/typeshed/stdlib/bdb.pyi | 21 +- mypy/typeshed/stdlib/binascii.pyi | 12 +- mypy/typeshed/stdlib/builtins.pyi | 496 ++++--- mypy/typeshed/stdlib/bz2.pyi | 90 +- mypy/typeshed/stdlib/cProfile.pyi | 10 +- mypy/typeshed/stdlib/calendar.pyi | 49 +- mypy/typeshed/stdlib/cgi.pyi | 44 +- mypy/typeshed/stdlib/cgitb.pyi | 20 +- mypy/typeshed/stdlib/chunk.pyi | 6 +- mypy/typeshed/stdlib/cmath.pyi | 2 +- mypy/typeshed/stdlib/cmd.pyi | 6 +- 
mypy/typeshed/stdlib/code.pyi | 22 +- mypy/typeshed/stdlib/codecs.pyi | 91 +- mypy/typeshed/stdlib/codeop.pyi | 4 +- mypy/typeshed/stdlib/collections/__init__.pyi | 83 +- mypy/typeshed/stdlib/compileall.pyi | 140 +- .../stdlib/concurrent/futures/_base.pyi | 20 +- .../stdlib/concurrent/futures/process.pyi | 32 +- .../stdlib/concurrent/futures/thread.pyi | 6 +- mypy/typeshed/stdlib/configparser.pyi | 100 +- mypy/typeshed/stdlib/contextlib.pyi | 18 +- mypy/typeshed/stdlib/copy.pyi | 2 +- mypy/typeshed/stdlib/copyreg.pyi | 2 +- mypy/typeshed/stdlib/crypt.pyi | 4 +- mypy/typeshed/stdlib/csv.pyi | 22 +- mypy/typeshed/stdlib/ctypes/__init__.pyi | 33 +- mypy/typeshed/stdlib/curses/textpad.pyi | 4 +- mypy/typeshed/stdlib/dataclasses.pyi | 190 +-- mypy/typeshed/stdlib/datetime.pyi | 4 +- mypy/typeshed/stdlib/dbm/__init__.pyi | 2 +- mypy/typeshed/stdlib/dbm/dumb.pyi | 4 +- mypy/typeshed/stdlib/dbm/gnu.pyi | 2 +- mypy/typeshed/stdlib/dbm/ndbm.pyi | 2 +- mypy/typeshed/stdlib/difflib.pyi | 89 +- mypy/typeshed/stdlib/dis.pyi | 44 +- .../stdlib/distutils/archive_util.pyi | 24 +- mypy/typeshed/stdlib/distutils/ccompiler.pyi | 124 +- mypy/typeshed/stdlib/distutils/cmd.pyi | 46 +- .../stdlib/distutils/command/bdist_msi.pyi | 6 +- .../distutils/command/bdist_wininst.pyi | 2 +- .../stdlib/distutils/command/build_py.pyi | 2 +- .../stdlib/distutils/command/config.pyi | 60 +- .../stdlib/distutils/command/register.pyi | 2 +- mypy/typeshed/stdlib/distutils/core.pyi | 2 +- mypy/typeshed/stdlib/distutils/dep_util.pyi | 2 +- mypy/typeshed/stdlib/distutils/dir_util.pyi | 18 +- mypy/typeshed/stdlib/distutils/dist.pyi | 6 +- mypy/typeshed/stdlib/distutils/extension.pyi | 28 +- .../stdlib/distutils/fancy_getopt.pyi | 6 +- mypy/typeshed/stdlib/distutils/file_util.pyi | 2 +- mypy/typeshed/stdlib/distutils/filelist.pyi | 24 +- mypy/typeshed/stdlib/distutils/log.pyi | 2 +- mypy/typeshed/stdlib/distutils/spawn.pyi | 2 +- mypy/typeshed/stdlib/distutils/sysconfig.pyi | 4 +- mypy/typeshed/stdlib/distutils/text_file.pyi | 6 +- mypy/typeshed/stdlib/distutils/util.pyi | 26 +- mypy/typeshed/stdlib/distutils/version.pyi | 6 +- mypy/typeshed/stdlib/doctest.pyi | 103 +- .../stdlib/email/_header_value_parser.pyi | 4 +- mypy/typeshed/stdlib/email/base64mime.pyi | 4 +- mypy/typeshed/stdlib/email/charset.pyi | 4 +- mypy/typeshed/stdlib/email/errors.pyi | 2 +- mypy/typeshed/stdlib/email/feedparser.pyi | 4 +- mypy/typeshed/stdlib/email/generator.pyi | 24 +- mypy/typeshed/stdlib/email/header.pyi | 22 +- mypy/typeshed/stdlib/email/headerregistry.pyi | 6 +- mypy/typeshed/stdlib/email/iterators.pyi | 6 +- mypy/typeshed/stdlib/email/message.pyi | 75 +- .../stdlib/email/mime/application.pyi | 4 +- mypy/typeshed/stdlib/email/mime/audio.pyi | 4 +- mypy/typeshed/stdlib/email/mime/base.pyi | 2 +- mypy/typeshed/stdlib/email/mime/image.pyi | 4 +- mypy/typeshed/stdlib/email/mime/message.pyi | 2 +- mypy/typeshed/stdlib/email/mime/multipart.pyi | 8 +- mypy/typeshed/stdlib/email/mime/text.pyi | 4 +- mypy/typeshed/stdlib/email/parser.pyi | 18 +- mypy/typeshed/stdlib/email/quoprimime.pyi | 6 +- mypy/typeshed/stdlib/email/utils.pyi | 14 +- mypy/typeshed/stdlib/encodings/utf_8.pyi | 12 +- mypy/typeshed/stdlib/encodings/utf_8_sig.pyi | 14 +- mypy/typeshed/stdlib/ensurepip/__init__.pyi | 12 +- mypy/typeshed/stdlib/enum.pyi | 30 +- mypy/typeshed/stdlib/fcntl.pyi | 10 +- mypy/typeshed/stdlib/filecmp.pyi | 8 +- mypy/typeshed/stdlib/fileinput.pyi | 204 +-- mypy/typeshed/stdlib/formatter.pyi | 18 +- mypy/typeshed/stdlib/fractions.pyi | 8 +- 
mypy/typeshed/stdlib/ftplib.pyi | 82 +- mypy/typeshed/stdlib/functools.pyi | 20 +- mypy/typeshed/stdlib/gc.pyi | 4 +- mypy/typeshed/stdlib/getopt.pyi | 2 +- mypy/typeshed/stdlib/getpass.pyi | 2 +- mypy/typeshed/stdlib/gettext.pyi | 78 +- mypy/typeshed/stdlib/glob.pyi | 24 +- mypy/typeshed/stdlib/graphlib.pyi | 2 +- mypy/typeshed/stdlib/gzip.pyi | 86 +- mypy/typeshed/stdlib/hashlib.pyi | 46 +- mypy/typeshed/stdlib/heapq.pyi | 6 +- mypy/typeshed/stdlib/hmac.pyi | 4 +- mypy/typeshed/stdlib/html/__init__.pyi | 2 +- mypy/typeshed/stdlib/html/parser.pyi | 2 +- mypy/typeshed/stdlib/http/client.pyi | 42 +- mypy/typeshed/stdlib/http/cookiejar.pyi | 70 +- mypy/typeshed/stdlib/http/cookies.pyi | 14 +- mypy/typeshed/stdlib/http/server.pyi | 12 +- mypy/typeshed/stdlib/imaplib.pyi | 46 +- mypy/typeshed/stdlib/imghdr.pyi | 2 +- mypy/typeshed/stdlib/imp.pyi | 10 +- mypy/typeshed/stdlib/importlib/__init__.pyi | 10 +- mypy/typeshed/stdlib/importlib/abc.pyi | 10 +- mypy/typeshed/stdlib/importlib/machinery.pyi | 26 +- .../stdlib/importlib/metadata/__init__.pyi | 2 +- mypy/typeshed/stdlib/importlib/resources.pyi | 4 +- mypy/typeshed/stdlib/importlib/util.pyi | 10 +- mypy/typeshed/stdlib/inspect.pyi | 62 +- mypy/typeshed/stdlib/io.pyi | 34 +- mypy/typeshed/stdlib/ipaddress.pyi | 6 +- mypy/typeshed/stdlib/itertools.pyi | 2 +- mypy/typeshed/stdlib/json/__init__.pyi | 60 +- mypy/typeshed/stdlib/json/decoder.pyi | 14 +- mypy/typeshed/stdlib/json/encoder.pyi | 18 +- mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi | 14 +- mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi | 4 +- mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi | 8 +- mypy/typeshed/stdlib/lib2to3/pygram.pyi | 3 + mypy/typeshed/stdlib/lib2to3/pytree.pyi | 20 +- mypy/typeshed/stdlib/lib2to3/refactor.pyi | 20 +- mypy/typeshed/stdlib/linecache.pyi | 8 +- mypy/typeshed/stdlib/locale.pyi | 10 +- mypy/typeshed/stdlib/logging/__init__.pyi | 90 +- mypy/typeshed/stdlib/logging/config.pyi | 12 +- mypy/typeshed/stdlib/logging/handlers.pyi | 84 +- mypy/typeshed/stdlib/lzma.pyi | 100 +- mypy/typeshed/stdlib/mailbox.pyi | 47 +- mypy/typeshed/stdlib/mailcap.pyi | 2 +- mypy/typeshed/stdlib/marshal.pyi | 4 +- mypy/typeshed/stdlib/math.pyi | 10 +- mypy/typeshed/stdlib/mimetypes.pyi | 28 +- mypy/typeshed/stdlib/mmap.pyi | 4 +- mypy/typeshed/stdlib/modulefinder.pyi | 23 +- mypy/typeshed/stdlib/msilib/__init__.pyi | 36 +- mypy/typeshed/stdlib/msilib/sequence.pyi | 1 - mypy/typeshed/stdlib/msilib/text.pyi | 1 - .../stdlib/multiprocessing/connection.pyi | 20 +- .../stdlib/multiprocessing/context.pyi | 46 +- .../stdlib/multiprocessing/dummy/__init__.pyi | 14 +- .../multiprocessing/dummy/connection.pyi | 6 +- .../stdlib/multiprocessing/forkserver.pyi | 6 +- mypy/typeshed/stdlib/multiprocessing/heap.pyi | 2 +- .../stdlib/multiprocessing/managers.pyi | 40 +- mypy/typeshed/stdlib/multiprocessing/pool.pyi | 44 +- .../stdlib/multiprocessing/popen_fork.pyi | 4 +- .../multiprocessing/popen_spawn_win32.pyi | 2 +- .../stdlib/multiprocessing/process.pyi | 10 +- .../stdlib/multiprocessing/queues.pyi | 6 +- .../stdlib/multiprocessing/reduction.pyi | 18 +- .../multiprocessing/resource_sharer.pyi | 2 +- .../stdlib/multiprocessing/shared_memory.pyi | 6 +- .../stdlib/multiprocessing/sharedctypes.pyi | 32 +- .../typeshed/stdlib/multiprocessing/spawn.pyi | 2 +- .../stdlib/multiprocessing/synchronize.pyi | 16 +- mypy/typeshed/stdlib/multiprocessing/util.pyi | 12 +- mypy/typeshed/stdlib/netrc.pyi | 4 +- mypy/typeshed/stdlib/nntplib.pyi | 54 +- mypy/typeshed/stdlib/ntpath.pyi | 4 +- 
mypy/typeshed/stdlib/numbers.pyi | 4 +- mypy/typeshed/stdlib/opcode.pyi | 4 +- mypy/typeshed/stdlib/optparse.pyi | 48 +- mypy/typeshed/stdlib/os/__init__.pyi | 141 +- mypy/typeshed/stdlib/pathlib.pyi | 60 +- mypy/typeshed/stdlib/pdb.pyi | 26 +- mypy/typeshed/stdlib/pickle.pyi | 28 +- mypy/typeshed/stdlib/pickletools.pyi | 10 +- mypy/typeshed/stdlib/pkgutil.pyi | 8 +- mypy/typeshed/stdlib/platform.pyi | 22 +- mypy/typeshed/stdlib/plistlib.pyi | 22 +- mypy/typeshed/stdlib/poplib.pyi | 16 +- mypy/typeshed/stdlib/posixpath.pyi | 10 +- mypy/typeshed/stdlib/pprint.pyi | 100 +- mypy/typeshed/stdlib/profile.pyi | 10 +- mypy/typeshed/stdlib/pstats.pyi | 4 +- mypy/typeshed/stdlib/py_compile.pyi | 26 +- mypy/typeshed/stdlib/pyclbr.pyi | 18 +- mypy/typeshed/stdlib/pydoc.pyi | 115 +- mypy/typeshed/stdlib/pyexpat/__init__.pyi | 6 +- mypy/typeshed/stdlib/queue.pyi | 10 +- mypy/typeshed/stdlib/quopri.pyi | 8 +- mypy/typeshed/stdlib/random.pyi | 26 +- mypy/typeshed/stdlib/re.pyi | 104 +- mypy/typeshed/stdlib/readline.pyi | 16 +- mypy/typeshed/stdlib/reprlib.pyi | 2 +- mypy/typeshed/stdlib/rlcompleter.pyi | 2 +- mypy/typeshed/stdlib/runpy.pyi | 10 +- mypy/typeshed/stdlib/sched.pyi | 2 +- mypy/typeshed/stdlib/secrets.pyi | 6 +- mypy/typeshed/stdlib/select.pyi | 10 +- mypy/typeshed/stdlib/selectors.pyi | 30 +- mypy/typeshed/stdlib/shelve.pyi | 6 +- mypy/typeshed/stdlib/shlex.pyi | 14 +- mypy/typeshed/stdlib/shutil.pyi | 65 +- mypy/typeshed/stdlib/signal.pyi | 2 +- mypy/typeshed/stdlib/site.pyi | 6 +- mypy/typeshed/stdlib/smtpd.pyi | 16 +- mypy/typeshed/stdlib/smtplib.pyi | 64 +- mypy/typeshed/stdlib/socket.pyi | 87 +- mypy/typeshed/stdlib/socketserver.pyi | 10 +- mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 30 +- mypy/typeshed/stdlib/sre_compile.pyi | 2 +- mypy/typeshed/stdlib/sre_constants.pyi | 6 +- mypy/typeshed/stdlib/sre_parse.pyi | 12 +- mypy/typeshed/stdlib/ssl.pyi | 115 +- mypy/typeshed/stdlib/statistics.pyi | 26 +- mypy/typeshed/stdlib/string.pyi | 4 +- mypy/typeshed/stdlib/struct.pyi | 4 +- mypy/typeshed/stdlib/subprocess.pyi | 1295 ++++++++--------- mypy/typeshed/stdlib/sunau.pyi | 8 +- mypy/typeshed/stdlib/symtable.pyi | 4 +- mypy/typeshed/stdlib/sys.pyi | 6 +- mypy/typeshed/stdlib/sysconfig.pyi | 6 +- mypy/typeshed/stdlib/tarfile.pyi | 98 +- mypy/typeshed/stdlib/telnetlib.pyi | 8 +- mypy/typeshed/stdlib/tempfile.pyi | 337 +++-- mypy/typeshed/stdlib/textwrap.pyi | 30 +- mypy/typeshed/stdlib/threading.pyi | 40 +- mypy/typeshed/stdlib/timeit.pyi | 24 +- mypy/typeshed/stdlib/tkinter/__init__.pyi | 384 ++--- mypy/typeshed/stdlib/tkinter/colorchooser.pyi | 4 +- mypy/typeshed/stdlib/tkinter/commondialog.pyi | 2 +- mypy/typeshed/stdlib/tkinter/dialog.pyi | 2 +- mypy/typeshed/stdlib/tkinter/dnd.pyi | 4 +- mypy/typeshed/stdlib/tkinter/filedialog.pyi | 16 +- mypy/typeshed/stdlib/tkinter/font.pyi | 30 +- mypy/typeshed/stdlib/tkinter/messagebox.pyi | 16 +- mypy/typeshed/stdlib/tkinter/scrolledtext.pyi | 2 +- mypy/typeshed/stdlib/tkinter/simpledialog.pyi | 16 +- mypy/typeshed/stdlib/tkinter/tix.pyi | 72 +- mypy/typeshed/stdlib/tkinter/ttk.pyi | 164 +-- mypy/typeshed/stdlib/trace.pyi | 30 +- mypy/typeshed/stdlib/traceback.pyi | 108 +- mypy/typeshed/stdlib/tracemalloc.pyi | 15 +- mypy/typeshed/stdlib/tty.pyi | 4 +- mypy/typeshed/stdlib/turtle.pyi | 168 +-- mypy/typeshed/stdlib/types.pyi | 54 +- mypy/typeshed/stdlib/typing.pyi | 58 +- mypy/typeshed/stdlib/typing_extensions.pyi | 34 +- mypy/typeshed/stdlib/unittest/case.pyi | 104 +- mypy/typeshed/stdlib/unittest/loader.pyi | 16 +- 
mypy/typeshed/stdlib/unittest/main.pyi | 26 +- mypy/typeshed/stdlib/unittest/mock.pyi | 106 +- mypy/typeshed/stdlib/unittest/result.pyi | 2 +- mypy/typeshed/stdlib/unittest/runner.pyi | 16 +- mypy/typeshed/stdlib/unittest/signals.pyi | 2 +- mypy/typeshed/stdlib/unittest/suite.pyi | 2 +- mypy/typeshed/stdlib/unittest/util.pyi | 2 +- mypy/typeshed/stdlib/urllib/error.pyi | 2 +- mypy/typeshed/stdlib/urllib/parse.pyi | 90 +- mypy/typeshed/stdlib/urllib/request.pyi | 98 +- mypy/typeshed/stdlib/urllib/response.pyi | 2 +- mypy/typeshed/stdlib/urllib/robotparser.pyi | 2 +- mypy/typeshed/stdlib/uu.pyi | 6 +- mypy/typeshed/stdlib/uuid.pyi | 23 +- mypy/typeshed/stdlib/venv/__init__.pyi | 52 +- mypy/typeshed/stdlib/warnings.pyi | 58 +- mypy/typeshed/stdlib/wave.pyi | 8 +- mypy/typeshed/stdlib/weakref.pyi | 14 +- mypy/typeshed/stdlib/webbrowser.pyi | 24 +- mypy/typeshed/stdlib/winreg.pyi | 8 +- mypy/typeshed/stdlib/winsound.pyi | 2 +- mypy/typeshed/stdlib/wsgiref/handlers.pyi | 6 +- mypy/typeshed/stdlib/wsgiref/headers.pyi | 4 +- mypy/typeshed/stdlib/wsgiref/util.pyi | 4 +- mypy/typeshed/stdlib/xml/dom/domreg.pyi | 4 +- mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi | 14 +- mypy/typeshed/stdlib/xml/dom/minidom.pyi | 52 +- mypy/typeshed/stdlib/xml/dom/pulldom.pyi | 6 +- mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi | 13 +- .../stdlib/xml/etree/ElementInclude.pyi | 6 +- .../typeshed/stdlib/xml/etree/ElementPath.pyi | 10 +- .../typeshed/stdlib/xml/etree/ElementTree.pyi | 154 +- mypy/typeshed/stdlib/xml/sax/__init__.pyi | 2 +- mypy/typeshed/stdlib/xml/sax/saxutils.pyi | 10 +- mypy/typeshed/stdlib/xml/sax/xmlreader.pyi | 6 +- mypy/typeshed/stdlib/xmlrpc/client.pyi | 83 +- mypy/typeshed/stdlib/xmlrpc/server.pyi | 54 +- mypy/typeshed/stdlib/zipapp.pyi | 10 +- mypy/typeshed/stdlib/zipfile.pyi | 94 +- mypy/typeshed/stdlib/zipimport.pyi | 6 +- mypy/typeshed/stdlib/zlib.pyi | 19 +- mypy/typeshed/stdlib/zoneinfo/__init__.pyi | 2 +- 341 files changed, 6101 insertions(+), 5626 deletions(-) diff --git a/mypy/typeshed/LICENSE b/mypy/typeshed/LICENSE index e5833ae4231d..13264487581f 100644 --- a/mypy/typeshed/LICENSE +++ b/mypy/typeshed/LICENSE @@ -235,4 +235,3 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. = = = = = - diff --git a/mypy/typeshed/stdlib/_bisect.pyi b/mypy/typeshed/stdlib/_bisect.pyi index d902e1eea7d4..4c79eec14d72 100644 --- a/mypy/typeshed/stdlib/_bisect.pyi +++ b/mypy/typeshed/stdlib/_bisect.pyi @@ -8,67 +8,67 @@ _T = TypeVar("_T") if sys.version_info >= (3, 10): @overload def bisect_left( - a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ..., *, key: None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None, *, key: None = None ) -> int: ... @overload def bisect_left( a: Sequence[_T], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: Callable[[_T], SupportsRichComparisonT] = ..., + key: Callable[[_T], SupportsRichComparisonT], ) -> int: ... @overload def bisect_right( - a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ..., *, key: None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None, *, key: None = None ) -> int: ... 
@overload def bisect_right( a: Sequence[_T], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: Callable[[_T], SupportsRichComparisonT] = ..., + key: Callable[[_T], SupportsRichComparisonT], ) -> int: ... @overload def insort_left( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: None = ..., + key: None = None, ) -> None: ... @overload def insort_left( - a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsRichComparisonT] = ... + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] ) -> None: ... @overload def insort_right( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: None = ..., + key: None = None, ) -> None: ... @overload def insort_right( - a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsRichComparisonT] = ... + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] ) -> None: ... else: def bisect_left( - a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> int: ... def bisect_right( - a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> int: ... def insort_left( - a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> None: ... def insort_right( - a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> None: ... diff --git a/mypy/typeshed/stdlib/_bootlocale.pyi b/mypy/typeshed/stdlib/_bootlocale.pyi index ee2d89347a9f..233d4934f3c6 100644 --- a/mypy/typeshed/stdlib/_bootlocale.pyi +++ b/mypy/typeshed/stdlib/_bootlocale.pyi @@ -1 +1 @@ -def getpreferredencoding(do_setlocale: bool = ...) -> str: ... +def getpreferredencoding(do_setlocale: bool = True) -> str: ... diff --git a/mypy/typeshed/stdlib/_codecs.pyi b/mypy/typeshed/stdlib/_codecs.pyi index 232256fbf614..44cc0f78028c 100644 --- a/mypy/typeshed/stdlib/_codecs.pyi +++ b/mypy/typeshed/stdlib/_codecs.pyi @@ -45,92 +45,94 @@ _BytesToBytesEncoding: TypeAlias = Literal[ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] @overload -def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... @overload -def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] +def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[misc] @overload -def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... 
+def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @overload -def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[misc] @overload -def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... +def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # these are documented as text encodings but in practice they also accept str as input @overload def decode( - obj: str, encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], errors: str = ... + obj: str, + encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], + errors: str = "strict", ) -> str: ... # hex is officially documented as a bytes to bytes encoding, but it appears to also work with str @overload -def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... +def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ... @overload -def decode(obj: ReadableBuffer, encoding: str = ..., errors: str = ...) -> str: ... +def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ... def lookup(__encoding: str) -> codecs.CodecInfo: ... def charmap_build(__map: str) -> _CharMap: ... -def ascii_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... -def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def charmap_decode(__data: ReadableBuffer, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ... -def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[bytes, int]: ... -def escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... -def escape_encode(__data: bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... -def latin_1_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... -def latin_1_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def ascii_decode(__data: ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def ascii_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def charmap_decode(__data: ReadableBuffer, __errors: str | None = None, __mapping: _CharMap | None = None) -> tuple[str, int]: ... +def charmap_encode(__str: str, __errors: str | None = None, __mapping: _CharMap | None = None) -> tuple[bytes, int]: ... +def escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def escape_encode(__data: bytes, __errors: str | None = None) -> tuple[bytes, int]: ... +def latin_1_decode(__data: ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def latin_1_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): def raw_unicode_escape_decode( - __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + __data: str | ReadableBuffer, __errors: str | None = None, __final: bool = True ) -> tuple[str, int]: ... else: - def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... 
+ def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... -def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... +def raw_unicode_escape_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): def unicode_escape_decode( - __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + __data: str | ReadableBuffer, __errors: str | None = None, __final: bool = True ) -> tuple[str, int]: ... else: - def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... + def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... -def unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def unicode_escape_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.version_info < (3, 8): - def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... - def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... + def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... + def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_16_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... +def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_16_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_16_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_16_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... def utf_16_ex_decode( - __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: int = False ) -> tuple[str, int, int]: ... -def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_16_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_32_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... +def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... 
+def utf_16_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_32_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_32_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_32_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... def utf_32_ex_decode( - __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: int = False ) -> tuple[str, int, int]: ... -def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_32_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_7_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_7_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_8_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_8_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_32_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_7_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_7_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_8_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_8_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.platform == "win32": - def mbcs_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... - def mbcs_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def mbcs_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... + def mbcs_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... def code_page_decode( - __codepage: int, __data: ReadableBuffer, __errors: str | None = ..., __final: int = ... + __codepage: int, __data: ReadableBuffer, __errors: str | None = None, __final: int = False ) -> tuple[str, int]: ... - def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... - def oem_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... - def oem_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def code_page_encode(__code_page: int, __str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + def oem_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... + def oem_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... 
diff --git a/mypy/typeshed/stdlib/_collections_abc.pyi b/mypy/typeshed/stdlib/_collections_abc.pyi index 8373fe836330..352da6cfb331 100644 --- a/mypy/typeshed/stdlib/_collections_abc.pyi +++ b/mypy/typeshed/stdlib/_collections_abc.pyi @@ -1,6 +1,6 @@ import sys from types import MappingProxyType -from typing import ( # noqa: Y027,Y038 +from typing import ( # noqa: Y022,Y038 AbstractSet as Set, AsyncGenerator as AsyncGenerator, AsyncIterable as AsyncIterable, diff --git a/mypy/typeshed/stdlib/_compression.pyi b/mypy/typeshed/stdlib/_compression.pyi index 7047a7bcd325..817f251586b2 100644 --- a/mypy/typeshed/stdlib/_compression.pyi +++ b/mypy/typeshed/stdlib/_compression.pyi @@ -21,5 +21,5 @@ class DecompressReader(RawIOBase): **decomp_args: Any, ) -> None: ... def readinto(self, b: WriteableBuffer) -> int: ... - def read(self, size: int = ...) -> bytes: ... - def seek(self, offset: int, whence: int = ...) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi index 7053e85f7b7f..61881fc09199 100644 --- a/mypy/typeshed/stdlib/_curses.pyi +++ b/mypy/typeshed/stdlib/_curses.pyi @@ -274,7 +274,7 @@ if sys.platform != "win32": def baudrate() -> int: ... def beep() -> None: ... def can_change_color() -> bool: ... - def cbreak(__flag: bool = ...) -> None: ... + def cbreak(__flag: bool = True) -> None: ... def color_content(__color_number: int) -> tuple[int, int, int]: ... # Changed in Python 3.8.8 and 3.9.2 if sys.version_info >= (3, 8): @@ -287,7 +287,7 @@ if sys.platform != "win32": def def_shell_mode() -> None: ... def delay_output(__ms: int) -> None: ... def doupdate() -> None: ... - def echo(__flag: bool = ...) -> None: ... + def echo(__flag: bool = True) -> None: ... def endwin() -> None: ... def erasechar() -> bytes: ... def filter() -> None: ... @@ -323,7 +323,7 @@ if sys.platform != "win32": def napms(__ms: int) -> int: ... def newpad(__nlines: int, __ncols: int) -> _CursesWindow: ... def newwin(__nlines: int, __ncols: int, __begin_y: int = ..., __begin_x: int = ...) -> _CursesWindow: ... - def nl(__flag: bool = ...) -> None: ... + def nl(__flag: bool = True) -> None: ... def nocbreak() -> None: ... def noecho() -> None: ... def nonl() -> None: ... @@ -332,8 +332,8 @@ if sys.platform != "win32": def pair_content(__pair_number: int) -> tuple[int, int]: ... def pair_number(__attr: int) -> int: ... def putp(__string: ReadOnlyBuffer) -> None: ... - def qiflush(__flag: bool = ...) -> None: ... - def raw(__flag: bool = ...) -> None: ... + def qiflush(__flag: bool = True) -> None: ... + def raw(__flag: bool = True) -> None: ... def reset_prog_mode() -> None: ... def reset_shell_mode() -> None: ... def resetty() -> None: ... @@ -345,7 +345,7 @@ if sys.platform != "win32": def set_tabsize(__size: int) -> None: ... def setsyx(__y: int, __x: int) -> None: ... - def setupterm(term: str | None = ..., fd: int = ...) -> None: ... + def setupterm(term: str | None = None, fd: int = -1) -> None: ... def start_color() -> None: ... def termattrs() -> int: ... def termname() -> bytes: ... @@ -354,15 +354,15 @@ if sys.platform != "win32": def tigetstr(__capname: str) -> bytes | None: ... 
def tparm( __str: ReadOnlyBuffer, - __i1: int = ..., - __i2: int = ..., - __i3: int = ..., - __i4: int = ..., - __i5: int = ..., - __i6: int = ..., - __i7: int = ..., - __i8: int = ..., - __i9: int = ..., + __i1: int = 0, + __i2: int = 0, + __i3: int = 0, + __i4: int = 0, + __i5: int = 0, + __i6: int = 0, + __i7: int = 0, + __i8: int = 0, + __i9: int = 0, ) -> bytes: ... def typeahead(__fd: int) -> None: ... def unctrl(__ch: _ChType) -> bytes: ... diff --git a/mypy/typeshed/stdlib/_decimal.pyi b/mypy/typeshed/stdlib/_decimal.pyi index ca97f69e2147..38b8ac30cc2f 100644 --- a/mypy/typeshed/stdlib/_decimal.pyi +++ b/mypy/typeshed/stdlib/_decimal.pyi @@ -53,7 +53,7 @@ def getcontext() -> Context: ... if sys.version_info >= (3, 11): def localcontext( - ctx: Context | None = ..., + ctx: Context | None = None, *, prec: int | None = ..., rounding: str | None = ..., @@ -66,17 +66,17 @@ if sys.version_info >= (3, 11): ) -> _ContextManager: ... else: - def localcontext(ctx: Context | None = ...) -> _ContextManager: ... + def localcontext(ctx: Context | None = None) -> _ContextManager: ... class Decimal: def __new__(cls: type[Self], value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... @classmethod def from_float(cls: type[Self], __f: float) -> Self: ... def __bool__(self) -> bool: ... - def compare(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def as_tuple(self) -> DecimalTuple: ... def as_integer_ratio(self) -> tuple[int, int]: ... - def to_eng_string(self, context: Context | None = ...) -> str: ... + def to_eng_string(self, context: Context | None = None) -> str: ... def __abs__(self) -> Decimal: ... def __add__(self, __other: _Decimal) -> Decimal: ... def __divmod__(self, __other: _Decimal) -> tuple[Decimal, Decimal]: ... @@ -100,7 +100,7 @@ class Decimal: def __rtruediv__(self, __other: _Decimal) -> Decimal: ... def __sub__(self, __other: _Decimal) -> Decimal: ... def __truediv__(self, __other: _Decimal) -> Decimal: ... - def remainder_near(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __trunc__(self) -> int: ... @@ -116,53 +116,53 @@ class Decimal: def __round__(self, __ndigits: int) -> Decimal: ... def __floor__(self) -> int: ... def __ceil__(self) -> int: ... - def fma(self, other: _Decimal, third: _Decimal, context: Context | None = ...) -> Decimal: ... + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ... def __rpow__(self, __other: _Decimal, __context: Context | None = ...) -> Decimal: ... - def normalize(self, context: Context | None = ...) -> Decimal: ... - def quantize(self, exp: _Decimal, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... - def same_quantum(self, other: _Decimal, context: Context | None = ...) -> bool: ... - def to_integral_exact(self, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... - def to_integral_value(self, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... - def to_integral(self, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... - def sqrt(self, context: Context | None = ...) -> Decimal: ... - def max(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... 
- def min(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def normalize(self, context: Context | None = None) -> Decimal: ... + def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ... + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def sqrt(self, context: Context | None = None) -> Decimal: ... + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def adjusted(self) -> int: ... def canonical(self) -> Decimal: ... - def compare_signal(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def compare_total(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def compare_total_mag(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def copy_abs(self) -> Decimal: ... def copy_negate(self) -> Decimal: ... - def copy_sign(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def exp(self, context: Context | None = ...) -> Decimal: ... + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def exp(self, context: Context | None = None) -> Decimal: ... def is_canonical(self) -> bool: ... def is_finite(self) -> bool: ... def is_infinite(self) -> bool: ... def is_nan(self) -> bool: ... - def is_normal(self, context: Context | None = ...) -> bool: ... + def is_normal(self, context: Context | None = None) -> bool: ... def is_qnan(self) -> bool: ... def is_signed(self) -> bool: ... def is_snan(self) -> bool: ... - def is_subnormal(self, context: Context | None = ...) -> bool: ... + def is_subnormal(self, context: Context | None = None) -> bool: ... def is_zero(self) -> bool: ... - def ln(self, context: Context | None = ...) -> Decimal: ... - def log10(self, context: Context | None = ...) -> Decimal: ... - def logb(self, context: Context | None = ...) -> Decimal: ... - def logical_and(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def logical_invert(self, context: Context | None = ...) -> Decimal: ... - def logical_or(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def logical_xor(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def max_mag(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def min_mag(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def next_minus(self, context: Context | None = ...) -> Decimal: ... - def next_plus(self, context: Context | None = ...) -> Decimal: ... - def next_toward(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def number_class(self, context: Context | None = ...) -> str: ... + def ln(self, context: Context | None = None) -> Decimal: ... + def log10(self, context: Context | None = None) -> Decimal: ... 
+ def logb(self, context: Context | None = None) -> Decimal: ... + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_invert(self, context: Context | None = None) -> Decimal: ... + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def next_minus(self, context: Context | None = None) -> Decimal: ... + def next_plus(self, context: Context | None = None) -> Decimal: ... + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def number_class(self, context: Context | None = None) -> str: ... def radix(self) -> Decimal: ... - def rotate(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def scaleb(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def shift(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def __reduce__(self: Self) -> tuple[type[Self], tuple[str]]: ... def __copy__(self: Self) -> Self: ... def __deepcopy__(self: Self, __memo: Any) -> Self: ... @@ -212,7 +212,7 @@ class Context: __hash__: ClassVar[None] # type: ignore[assignment] def Etiny(self) -> int: ... def Etop(self) -> int: ... - def create_decimal(self, __num: _DecimalNew = ...) -> Decimal: ... + def create_decimal(self, __num: _DecimalNew = "0") -> Decimal: ... def create_decimal_from_float(self, __f: float) -> Decimal: ... def abs(self, __x: _Decimal) -> Decimal: ... def add(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... @@ -259,7 +259,7 @@ class Context: def normalize(self, __x: _Decimal) -> Decimal: ... def number_class(self, __x: _Decimal) -> str: ... def plus(self, __x: _Decimal) -> Decimal: ... - def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = ...) -> Decimal: ... + def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ... def quantize(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... def radix(self) -> Decimal: ... def remainder(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... diff --git a/mypy/typeshed/stdlib/_dummy_thread.pyi b/mypy/typeshed/stdlib/_dummy_thread.pyi index ff16b1d3dcf4..e371dd0e9933 100644 --- a/mypy/typeshed/stdlib/_dummy_thread.pyi +++ b/mypy/typeshed/stdlib/_dummy_thread.pyi @@ -11,12 +11,12 @@ def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwa def exit() -> NoReturn: ... def get_ident() -> int: ... def allocate_lock() -> LockType: ... -def stack_size(size: int | None = ...) -> int: ... +def stack_size(size: int | None = None) -> int: ... class LockType: locked_status: bool - def acquire(self, waitflag: bool | None = ..., timeout: int = ...) -> bool: ... - def __enter__(self, waitflag: bool | None = ..., timeout: int = ...) -> bool: ... + def acquire(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... + def __enter__(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ... 
def release(self) -> bool: ... def locked(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/_dummy_threading.pyi b/mypy/typeshed/stdlib/_dummy_threading.pyi index 8f7f5a9b994c..9a49dfa9649e 100644 --- a/mypy/typeshed/stdlib/_dummy_threading.pyi +++ b/mypy/typeshed/stdlib/_dummy_threading.pyi @@ -41,7 +41,7 @@ def enumerate() -> list[Thread]: ... def main_thread() -> Thread: ... def settrace(func: TraceFunction) -> None: ... def setprofile(func: ProfileFunction | None) -> None: ... -def stack_size(size: int = ...) -> int: ... +def stack_size(size: int | None = None) -> int: ... TIMEOUT_MAX: float @@ -59,17 +59,17 @@ class Thread: def ident(self) -> int | None: ... def __init__( self, - group: None = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., - kwargs: Mapping[str, Any] | None = ..., + kwargs: Mapping[str, Any] | None = None, *, - daemon: bool | None = ..., + daemon: bool | None = None, ) -> None: ... def start(self) -> None: ... def run(self) -> None: ... - def join(self, timeout: float | None = ...) -> None: ... + def join(self, timeout: float | None = None) -> None: ... def getName(self) -> str: ... def setName(self, name: str) -> None: ... if sys.version_info >= (3, 8): @@ -99,32 +99,32 @@ class _RLock: def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... - def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... def release(self) -> None: ... RLock = _RLock class Condition: - def __init__(self, lock: Lock | _RLock | None = ...) -> None: ... + def __init__(self, lock: Lock | _RLock | None = None) -> None: ... def __enter__(self) -> bool: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... - def wait_for(self, predicate: Callable[[], _T], timeout: float | None = ...) -> _T: ... - def notify(self, n: int = ...) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... def notifyAll(self) -> None: ... class Semaphore: - def __init__(self, value: int = ...) -> None: ... + def __init__(self, value: int = 1) -> None: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... - def acquire(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... - def __enter__(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... if sys.version_info >= (3, 9): def release(self, n: int = ...) -> None: ... else: @@ -136,7 +136,7 @@ class Event: def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... 
if sys.version_info >= (3, 8): from _thread import _excepthook, _ExceptHookArgs @@ -149,8 +149,8 @@ class Timer(Thread): self, interval: float, function: Callable[..., object], - args: Iterable[Any] | None = ..., - kwargs: Mapping[str, Any] | None = ..., + args: Iterable[Any] | None = None, + kwargs: Mapping[str, Any] | None = None, ) -> None: ... def cancel(self) -> None: ... @@ -161,8 +161,8 @@ class Barrier: def n_waiting(self) -> int: ... @property def broken(self) -> bool: ... - def __init__(self, parties: int, action: Callable[[], None] | None = ..., timeout: float | None = ...) -> None: ... - def wait(self, timeout: float | None = ...) -> int: ... + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... + def wait(self, timeout: float | None = None) -> int: ... def reset(self) -> None: ... def abort(self) -> None: ... diff --git a/mypy/typeshed/stdlib/_imp.pyi b/mypy/typeshed/stdlib/_imp.pyi index 2b54a0f6fb42..adab2e803efe 100644 --- a/mypy/typeshed/stdlib/_imp.pyi +++ b/mypy/typeshed/stdlib/_imp.pyi @@ -8,7 +8,7 @@ check_hash_based_pycs: str def source_hash(key: int, source: ReadableBuffer) -> bytes: ... def create_builtin(__spec: ModuleSpec) -> types.ModuleType: ... -def create_dynamic(__spec: ModuleSpec, __file: Any = ...) -> types.ModuleType: ... +def create_dynamic(__spec: ModuleSpec, __file: Any = None) -> types.ModuleType: ... def acquire_lock() -> None: ... def exec_builtin(__mod: types.ModuleType) -> int: ... def exec_dynamic(__mod: types.ModuleType) -> int: ... @@ -21,8 +21,8 @@ def lock_held() -> bool: ... def release_lock() -> None: ... if sys.version_info >= (3, 11): - def find_frozen(__name: str, *, withdata: bool = ...) -> tuple[memoryview | None, bool, str | None] | None: ... - def get_frozen_object(__name: str, __data: ReadableBuffer | None = ...) -> types.CodeType: ... + def find_frozen(__name: str, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ... + def get_frozen_object(__name: str, __data: ReadableBuffer | None = None) -> types.CodeType: ... else: def get_frozen_object(__name: str) -> types.CodeType: ... diff --git a/mypy/typeshed/stdlib/_markupbase.pyi b/mypy/typeshed/stdlib/_markupbase.pyi index 7d2a39a7aaea..62bad25e5ccc 100644 --- a/mypy/typeshed/stdlib/_markupbase.pyi +++ b/mypy/typeshed/stdlib/_markupbase.pyi @@ -5,9 +5,9 @@ class ParserBase: def reset(self) -> None: ... def getpos(self) -> tuple[int, int]: ... def unknown_decl(self, data: str) -> None: ... - def parse_comment(self, i: int, report: int = ...) -> int: ... # undocumented + def parse_comment(self, i: int, report: int = 1) -> int: ... # undocumented def parse_declaration(self, i: int) -> int: ... # undocumented - def parse_marked_section(self, i: int, report: int = ...) -> int: ... # undocumented + def parse_marked_section(self, i: int, report: int = 1) -> int: ... # undocumented def updatepos(self, i: int, j: int) -> int: ... # undocumented if sys.version_info < (3, 10): # Removed from ParserBase: https://bugs.python.org/issue31844 diff --git a/mypy/typeshed/stdlib/_msi.pyi b/mypy/typeshed/stdlib/_msi.pyi index 1b86904d5ebc..2fdbdfd0e9f4 100644 --- a/mypy/typeshed/stdlib/_msi.pyi +++ b/mypy/typeshed/stdlib/_msi.pyi @@ -1,7 +1,6 @@ import sys if sys.platform == "win32": - # Actual typename View, not exposed by the implementation class _View: def Execute(self, params: _Record | None = ...) -> None: ... 
@@ -12,6 +11,7 @@ if sys.platform == "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] + # Actual typename SummaryInformation, not exposed by the implementation class _SummaryInformation: def GetProperty(self, field: int) -> int | bytes | None: ... @@ -21,6 +21,7 @@ if sys.platform == "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] + # Actual typename Database, not exposed by the implementation class _Database: def OpenView(self, sql: str) -> _View: ... @@ -30,6 +31,7 @@ if sys.platform == "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] + # Actual typename Record, not exposed by the implementation class _Record: def GetFieldCount(self) -> int: ... diff --git a/mypy/typeshed/stdlib/_operator.pyi b/mypy/typeshed/stdlib/_operator.pyi index 7488724caf74..e7d1a98c4027 100644 --- a/mypy/typeshed/stdlib/_operator.pyi +++ b/mypy/typeshed/stdlib/_operator.pyi @@ -88,7 +88,7 @@ def setitem(__a: MutableSequence[_T], __b: SupportsIndex, __c: _T) -> None: ... def setitem(__a: MutableSequence[_T], __b: slice, __c: Sequence[_T]) -> None: ... @overload def setitem(__a: MutableMapping[_K, _V], __b: _K, __c: _V) -> None: ... -def length_hint(__obj: object, __default: int = ...) -> int: ... +def length_hint(__obj: object, __default: int = 0) -> int: ... @final class attrgetter(Generic[_T_co]): @overload diff --git a/mypy/typeshed/stdlib/_osx_support.pyi b/mypy/typeshed/stdlib/_osx_support.pyi index 7fd0ee922ca6..3eb6f4ddc67c 100644 --- a/mypy/typeshed/stdlib/_osx_support.pyi +++ b/mypy/typeshed/stdlib/_osx_support.pyi @@ -12,10 +12,10 @@ _UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented _COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented _INITPRE: str # undocumented -def _find_executable(executable: str, path: str | None = ...) -> str | None: ... # undocumented +def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented if sys.version_info >= (3, 8): - def _read_output(commandstring: str, capture_stderr: bool = ...) -> str | None: ... # undocumented + def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented else: def _read_output(commandstring: str) -> str | None: ... # undocumented diff --git a/mypy/typeshed/stdlib/_random.pyi b/mypy/typeshed/stdlib/_random.pyi index c4b235f0cd5b..7c5803ede781 100644 --- a/mypy/typeshed/stdlib/_random.pyi +++ b/mypy/typeshed/stdlib/_random.pyi @@ -5,7 +5,7 @@ _State: TypeAlias = tuple[int, ...] class Random: def __init__(self, seed: object = ...) -> None: ... - def seed(self, __n: object = ...) -> None: ... + def seed(self, __n: object = None) -> None: ... def getstate(self) -> _State: ... def setstate(self, __state: _State) -> None: ... def random(self) -> float: ... diff --git a/mypy/typeshed/stdlib/_sitebuiltins.pyi b/mypy/typeshed/stdlib/_sitebuiltins.pyi index 4a35921e1ef7..3bda2d88425d 100644 --- a/mypy/typeshed/stdlib/_sitebuiltins.pyi +++ b/mypy/typeshed/stdlib/_sitebuiltins.pyi @@ -6,7 +6,7 @@ class Quitter: name: str eof: str def __init__(self, name: str, eof: str) -> None: ... - def __call__(self, code: int | None = ...) -> NoReturn: ... + def __call__(self, code: int | None = None) -> NoReturn: ... 
class _Printer: MAXLINES: ClassVar[Literal[23]] diff --git a/mypy/typeshed/stdlib/_tkinter.pyi b/mypy/typeshed/stdlib/_tkinter.pyi index fced8c95d2fa..271fd37df68b 100644 --- a/mypy/typeshed/stdlib/_tkinter.pyi +++ b/mypy/typeshed/stdlib/_tkinter.pyi @@ -60,7 +60,7 @@ class TkappType: def createtimerhandler(self, __milliseconds, __func): ... def deletecommand(self, __name): ... - def dooneevent(self, __flags: int = ...): ... + def dooneevent(self, __flags: int = 0): ... def eval(self, __script: str) -> str: ... def evalfile(self, __fileName): ... def exprboolean(self, __s): ... @@ -76,7 +76,7 @@ class TkappType: def globalunsetvar(self, *args, **kwargs): ... def interpaddr(self): ... def loadtk(self) -> None: ... - def mainloop(self, __threshold: int = ...): ... + def mainloop(self, __threshold: int = 0): ... def quit(self): ... def record(self, __script): ... def setvar(self, *ags, **kwargs): ... @@ -107,15 +107,29 @@ TK_VERSION: str class TkttType: def deletetimerhandler(self): ... -def create( - __screenName: str | None = ..., - __baseName: str | None = ..., - __className: str = ..., - __interactive: bool = ..., - __wantobjects: bool = ..., - __wantTk: bool = ..., - __sync: bool = ..., - __use: str | None = ..., -): ... +if sys.version_info >= (3, 8): + def create( + __screenName: str | None = None, + __baseName: str = "", + __className: str = "Tk", + __interactive: bool = False, + __wantobjects: bool = False, + __wantTk: bool = True, + __sync: bool = False, + __use: str | None = None, + ): ... + +else: + def create( + __screenName: str | None = None, + __baseName: str | None = None, + __className: str = "Tk", + __interactive: bool = False, + __wantobjects: bool = False, + __wantTk: bool = True, + __sync: bool = False, + __use: str | None = None, + ): ... + def getbusywaitinterval(): ... def setbusywaitinterval(__new_val): ... diff --git a/mypy/typeshed/stdlib/_tracemalloc.pyi b/mypy/typeshed/stdlib/_tracemalloc.pyi index 2262d4b16b3a..1b79d9dc5785 100644 --- a/mypy/typeshed/stdlib/_tracemalloc.pyi +++ b/mypy/typeshed/stdlib/_tracemalloc.pyi @@ -13,5 +13,5 @@ def is_tracing() -> bool: ... if sys.version_info >= (3, 9): def reset_peak() -> None: ... -def start(__nframe: int = ...) -> None: ... +def start(__nframe: int = 1) -> None: ... def stop() -> None: ... diff --git a/mypy/typeshed/stdlib/_warnings.pyi b/mypy/typeshed/stdlib/_warnings.pyi index 2eb9ae478a5d..0981dfeaafee 100644 --- a/mypy/typeshed/stdlib/_warnings.pyi +++ b/mypy/typeshed/stdlib/_warnings.pyi @@ -5,9 +5,9 @@ _onceregistry: dict[Any, Any] filters: list[tuple[str, str | None, type[Warning], str | None, int]] @overload -def warn(message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ...) -> None: ... +def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @overload -def warn(message: Warning, category: Any = ..., stacklevel: int = ..., source: Any | None = ...) -> None: ... +def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @overload def warn_explicit( message: str, diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi index 742bc3ad9f36..df462ad859c7 100644 --- a/mypy/typeshed/stdlib/_weakref.pyi +++ b/mypy/typeshed/stdlib/_weakref.pyi @@ -33,6 +33,6 @@ def getweakrefs(__object: Any) -> list[Any]: ... 
# Return CallableProxyType if object is callable, ProxyType otherwise @overload -def proxy(__object: _C, __callback: Callable[[_C], Any] | None = ...) -> CallableProxyType[_C]: ... +def proxy(__object: _C, __callback: Callable[[_C], Any] | None = None) -> CallableProxyType[_C]: ... @overload -def proxy(__object: _T, __callback: Callable[[_T], Any] | None = ...) -> Any: ... +def proxy(__object: _T, __callback: Callable[[_T], Any] | None = None) -> Any: ... diff --git a/mypy/typeshed/stdlib/_weakrefset.pyi b/mypy/typeshed/stdlib/_weakrefset.pyi index da09442e855b..fdf26641bbeb 100644 --- a/mypy/typeshed/stdlib/_weakrefset.pyi +++ b/mypy/typeshed/stdlib/_weakrefset.pyi @@ -13,7 +13,7 @@ _T = TypeVar("_T") class WeakSet(MutableSet[_T], Generic[_T]): @overload - def __init__(self, data: None = ...) -> None: ... + def __init__(self, data: None = None) -> None: ... @overload def __init__(self, data: Iterable[_T]) -> None: ... def add(self, item: _T) -> None: ... diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index 4fbefc33abb1..5e0087e29934 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -128,7 +128,7 @@ if sys.platform == "win32": @overload def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... @overload - def ConnectNamedPipe(handle: int, overlapped: Literal[False] = ...) -> None: ... + def ConnectNamedPipe(handle: int, overlapped: Literal[False] = False) -> None: ... @overload def ConnectNamedPipe(handle: int, overlapped: bool) -> Overlapped | None: ... def CreateFile( @@ -169,7 +169,7 @@ if sys.platform == "win32": __target_process_handle: int, __desired_access: int, __inherit_handle: bool, - __options: int = ..., + __options: int = 0, ) -> int: ... def ExitProcess(__ExitCode: int) -> NoReturn: ... def GetACP() -> int: ... @@ -181,7 +181,7 @@ if sys.platform == "win32": def GetStdHandle(__std_handle: int) -> int: ... def GetVersion() -> int: ... def OpenProcess(__desired_access: int, __inherit_handle: bool, __process_id: int) -> int: ... - def PeekNamedPipe(__handle: int, __size: int = ...) -> tuple[int, int] | tuple[bytes, int, int]: ... + def PeekNamedPipe(__handle: int, __size: int = 0) -> tuple[int, int] | tuple[bytes, int, int]: ... if sys.version_info >= (3, 10): def LCMapStringEx(locale: str, flags: int, src: str) -> str: ... def UnmapViewOfFile(__address: int) -> None: ... @@ -189,20 +189,20 @@ if sys.platform == "win32": @overload def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @overload - def ReadFile(handle: int, size: int, overlapped: Literal[False] = ...) -> tuple[bytes, int]: ... + def ReadFile(handle: int, size: int, overlapped: Literal[False] = False) -> tuple[bytes, int]: ... @overload def ReadFile(handle: int, size: int, overlapped: int | bool) -> tuple[Any, int]: ... def SetNamedPipeHandleState( __named_pipe: int, __mode: int | None, __max_collection_count: int | None, __collect_data_timeout: int | None ) -> None: ... def TerminateProcess(__handle: int, __exit_code: int) -> None: ... - def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = ...) -> int: ... + def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = 4294967295) -> int: ... def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... def WaitNamedPipe(__name: str, __timeout: int) -> None: ... 
@overload def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @overload - def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = ...) -> tuple[int, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = False) -> tuple[int, int]: ... @overload def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... @final diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index 7b39c88ed394..44a5b2289832 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -20,7 +20,7 @@ class ABCMeta(type): def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... - def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = ...) -> None: ... + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ... def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... def abstractmethod(funcobj: _FuncT) -> _FuncT: ... diff --git a/mypy/typeshed/stdlib/aifc.pyi b/mypy/typeshed/stdlib/aifc.pyi index 14e824f3d22e..ad126d6cdbef 100644 --- a/mypy/typeshed/stdlib/aifc.pyi +++ b/mypy/typeshed/stdlib/aifc.pyi @@ -81,7 +81,7 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Aifc_read: ... @overload def open(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... @overload -def open(f: _File, mode: str | None = ...) -> Any: ... +def open(f: _File, mode: str | None = None) -> Any: ... if sys.version_info < (3, 9): @overload @@ -89,4 +89,4 @@ if sys.version_info < (3, 9): @overload def openfp(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... @overload - def openfp(f: _File, mode: str | None = ...) -> Any: ... + def openfp(f: _File, mode: str | None = None) -> Any: ... diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index 1bdcace7d897..20d9dfa9d137 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -78,7 +78,7 @@ class _ActionsContainer: _has_negative_number_optionals: list[bool] def __init__(self, description: str | None, prefix_chars: str, argument_default: Any, conflict_handler: str) -> None: ... def register(self, registry_name: str, value: Any, object: Any) -> None: ... - def _registry_get(self, registry_name: str, value: Any, default: Any = ...) -> Any: ... + def _registry_get(self, registry_name: str, value: Any, default: Any = None) -> Any: ... def set_defaults(self, **kwargs: Any) -> None: ... def get_default(self, dest: str) -> Any: ... def add_argument( @@ -104,7 +104,7 @@ class _ActionsContainer: def _add_container_actions(self, container: _ActionsContainer) -> None: ... def _get_positional_kwargs(self, dest: str, **kwargs: Any) -> dict[str, Any]: ... def _get_optional_kwargs(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... - def _pop_action_class(self, kwargs: Any, default: type[Action] | None = ...) -> type[Action]: ... + def _pop_action_class(self, kwargs: Any, default: type[Action] | None = None) -> type[Action]: ... def _get_handler(self) -> Callable[[Action, Iterable[tuple[str, Action]]], Any]: ... def _check_conflict(self, action: Action) -> None: ... def _handle_conflict_error(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> NoReturn: ... 
@@ -131,40 +131,40 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): if sys.version_info >= (3, 9): def __init__( self, - prog: str | None = ..., - usage: str | None = ..., - description: str | None = ..., - epilog: str | None = ..., + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, parents: Sequence[ArgumentParser] = ..., formatter_class: _FormatterClass = ..., - prefix_chars: str = ..., - fromfile_prefix_chars: str | None = ..., - argument_default: Any = ..., - conflict_handler: str = ..., - add_help: bool = ..., - allow_abbrev: bool = ..., - exit_on_error: bool = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, ) -> None: ... else: def __init__( self, - prog: str | None = ..., - usage: str | None = ..., - description: str | None = ..., - epilog: str | None = ..., + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, parents: Sequence[ArgumentParser] = ..., formatter_class: _FormatterClass = ..., - prefix_chars: str = ..., - fromfile_prefix_chars: str | None = ..., - argument_default: Any = ..., - conflict_handler: str = ..., - add_help: bool = ..., - allow_abbrev: bool = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, ) -> None: ... # The type-ignores in these overloads should be temporary. See: # https://github.com/python/typeshed/pull/2643#issuecomment-442280277 @overload - def parse_args(self, args: Sequence[str] | None = ...) -> Namespace: ... + def parse_args(self, args: Sequence[str] | None = None) -> Namespace: ... @overload def parse_args(self, args: Sequence[str] | None, namespace: None) -> Namespace: ... # type: ignore[misc] @overload @@ -202,19 +202,19 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): help: str | None = ..., metavar: str | None = ..., ) -> _SubParsersAction[_ArgumentParserT]: ... - def print_usage(self, file: IO[str] | None = ...) -> None: ... - def print_help(self, file: IO[str] | None = ...) -> None: ... + def print_usage(self, file: IO[str] | None = None) -> None: ... + def print_help(self, file: IO[str] | None = None) -> None: ... def format_usage(self) -> str: ... def format_help(self) -> str: ... def parse_known_args( - self, args: Sequence[str] | None = ..., namespace: Namespace | None = ... + self, args: Sequence[str] | None = None, namespace: Namespace | None = None ) -> tuple[Namespace, list[str]]: ... def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... - def exit(self, status: int = ..., message: str | None = ...) -> NoReturn: ... + def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... def error(self, message: str) -> NoReturn: ... - def parse_intermixed_args(self, args: Sequence[str] | None = ..., namespace: Namespace | None = ...) -> Namespace: ... + def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... def parse_known_intermixed_args( - self, args: Sequence[str] | None = ..., namespace: Namespace | None = ... + self, args: Sequence[str] | None = None, namespace: Namespace | None = None ) -> tuple[Namespace, list[str]]: ... 
# undocumented def _get_optional_actions(self) -> list[Action]: ... @@ -230,7 +230,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def _get_value(self, action: Action, arg_string: str) -> Any: ... def _check_value(self, action: Action, value: Any) -> None: ... def _get_formatter(self) -> HelpFormatter: ... - def _print_message(self, message: str, file: IO[str] | None = ...) -> None: ... + def _print_message(self, message: str, file: IO[str] | None = None) -> None: ... class HelpFormatter: # undocumented @@ -246,7 +246,7 @@ class HelpFormatter: _whitespace_matcher: Pattern[str] _long_break_matcher: Pattern[str] _Section: type[Any] # Nested class - def __init__(self, prog: str, indent_increment: int = ..., max_help_position: int = ..., width: int | None = ...) -> None: ... + def __init__(self, prog: str, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None) -> None: ... def _indent(self) -> None: ... def _dedent(self) -> None: ... def _add_item(self, func: Callable[..., str], args: Iterable[Any]) -> None: ... @@ -254,7 +254,7 @@ class HelpFormatter: def end_section(self) -> None: ... def add_text(self, text: str | None) -> None: ... def add_usage( - self, usage: str | None, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None = ... + self, usage: str | None, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None = None ) -> None: ... def add_argument(self, action: Action) -> None: ... def add_arguments(self, actions: Iterable[Action]) -> None: ... @@ -297,17 +297,17 @@ class Action(_AttributeHolder): self, option_strings: Sequence[str], dest: str, - nargs: int | str | None = ..., - const: _T | None = ..., - default: _T | str | None = ..., - type: Callable[[str], _T] | FileType | None = ..., - choices: Iterable[_T] | None = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + nargs: int | str | None = None, + const: _T | None = None, + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... def __call__( - self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = ... + self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = None ) -> None: ... if sys.version_info >= (3, 9): def format_usage(self) -> str: ... @@ -318,12 +318,12 @@ if sys.version_info >= (3, 9): self, option_strings: Sequence[str], dest: str, - default: _T | str | None = ..., - type: Callable[[str], _T] | FileType | None = ..., - choices: Iterable[_T] | None = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... class Namespace(_AttributeHolder): @@ -339,7 +339,7 @@ class FileType: _bufsize: int _encoding: str | None _errors: str | None - def __init__(self, mode: str = ..., bufsize: int = ..., encoding: str | None = ..., errors: str | None = ...) -> None: ... + def __init__(self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None) -> None: ... 
def __call__(self, string: str) -> IO[Any]: ... # undocumented @@ -347,14 +347,14 @@ class _ArgumentGroup(_ActionsContainer): title: str | None _group_actions: list[Action] def __init__( - self, container: _ActionsContainer, title: str | None = ..., description: str | None = ..., **kwargs: Any + self, container: _ActionsContainer, title: str | None = None, description: str | None = None, **kwargs: Any ) -> None: ... # undocumented class _MutuallyExclusiveGroup(_ArgumentGroup): required: bool _container: _ActionsContainer - def __init__(self, container: _ActionsContainer, required: bool = ...) -> None: ... + def __init__(self, container: _ActionsContainer, required: bool = False) -> None: ... # undocumented class _StoreAction(Action): ... @@ -366,11 +366,11 @@ class _StoreConstAction(Action): self, option_strings: Sequence[str], dest: str, - const: Any | None = ..., - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... else: def __init__( @@ -378,22 +378,22 @@ class _StoreConstAction(Action): option_strings: Sequence[str], dest: str, const: Any, - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... # undocumented class _StoreTrueAction(_StoreConstAction): def __init__( - self, option_strings: Sequence[str], dest: str, default: bool = ..., required: bool = ..., help: str | None = ... + self, option_strings: Sequence[str], dest: str, default: bool = False, required: bool = False, help: str | None = None ) -> None: ... # undocumented class _StoreFalseAction(_StoreConstAction): def __init__( - self, option_strings: Sequence[str], dest: str, default: bool = ..., required: bool = ..., help: str | None = ... + self, option_strings: Sequence[str], dest: str, default: bool = True, required: bool = False, help: str | None = None ) -> None: ... # undocumented @@ -410,11 +410,11 @@ class _AppendConstAction(Action): self, option_strings: Sequence[str], dest: str, - const: Any | None = ..., - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... else: def __init__( @@ -422,27 +422,34 @@ class _AppendConstAction(Action): option_strings: Sequence[str], dest: str, const: Any, - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... # undocumented class _CountAction(Action): def __init__( - self, option_strings: Sequence[str], dest: str, default: Any = ..., required: bool = ..., help: str | None = ... + self, option_strings: Sequence[str], dest: str, default: Any = None, required: bool = False, help: str | None = None ) -> None: ... # undocumented class _HelpAction(Action): - def __init__(self, option_strings: Sequence[str], dest: str = ..., default: str = ..., help: str | None = ...) -> None: ... 
+ def __init__( + self, option_strings: Sequence[str], dest: str = "==SUPPRESS==", default: str = "==SUPPRESS==", help: str | None = None + ) -> None: ... # undocumented class _VersionAction(Action): version: str | None def __init__( - self, option_strings: Sequence[str], version: str | None = ..., dest: str = ..., default: str = ..., help: str = ... + self, + option_strings: Sequence[str], + version: str | None = None, + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str = "show program's version number and exit", ) -> None: ... # undocumented @@ -458,10 +465,10 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): option_strings: Sequence[str], prog: str, parser_class: type[_ArgumentParserT], - dest: str = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + dest: str = "==SUPPRESS==", + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... # Note: `add_parser` accepts all kwargs of `ArgumentParser.__init__`. It also diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi index e84456049df6..25c389c47e8e 100644 --- a/mypy/typeshed/stdlib/array.pyi +++ b/mypy/typeshed/stdlib/array.pyi @@ -3,7 +3,7 @@ from _typeshed import ReadableBuffer, Self, SupportsRead, SupportsWrite from collections.abc import Iterable # pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence -from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y027 +from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y022 from typing_extensions import Literal, SupportsIndex, TypeAlias _IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] @@ -44,12 +44,12 @@ class array(MutableSequence[_T], Generic[_T]): def fromlist(self, __list: list[_T]) -> None: ... def fromunicode(self, __ustr: str) -> None: ... if sys.version_info >= (3, 10): - def index(self, __v: _T, __start: int = ..., __stop: int = ...) -> int: ... + def index(self, __v: _T, __start: int = 0, __stop: int = sys.maxsize) -> int: ... else: def index(self, __v: _T) -> int: ... # type: ignore[override] def insert(self, __i: int, __v: _T) -> None: ... - def pop(self, __i: int = ...) -> _T: ... + def pop(self, __i: int = -1) -> _T: ... def remove(self, __v: _T) -> None: ... def tobytes(self) -> bytes: ... def tofile(self, __f: SupportsWrite[bytes]) -> None: ... diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index 9a5bf0a623fb..ea899e150f97 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -1,7 +1,7 @@ import os import sys from _ast import * -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, Unused from collections.abc import Iterator from typing import Any, TypeVar, overload from typing_extensions import Literal @@ -9,7 +9,7 @@ from typing_extensions import Literal if sys.version_info >= (3, 8): class _ABC(type): if sys.version_info >= (3, 9): - def __init__(cls, *args: object) -> None: ... + def __init__(cls, *args: Unused) -> None: ... 
class Num(Constant, metaclass=_ABC): value: int | float | complex @@ -174,11 +174,11 @@ if sys.version_info >= (3, 8): @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any] = ..., - mode: Literal["exec"] = ..., + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Module: ... @overload def parse( @@ -186,8 +186,8 @@ if sys.version_info >= (3, 8): filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"], *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Expression: ... @overload def parse( @@ -195,8 +195,8 @@ if sys.version_info >= (3, 8): filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["func_type"], *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> FunctionType: ... @overload def parse( @@ -204,47 +204,49 @@ if sys.version_info >= (3, 8): filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"], *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Interactive: ... @overload def parse( source: str | ReadableBuffer, *, mode: Literal["eval"], - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Expression: ... @overload def parse( source: str | ReadableBuffer, *, mode: Literal["func_type"], - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> FunctionType: ... @overload def parse( source: str | ReadableBuffer, *, mode: Literal["single"], - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Interactive: ... @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any] = ..., - mode: str = ..., + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: str = "exec", *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> AST: ... else: @overload def parse( - source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: Literal["exec"] = ... + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", ) -> Module: ... @overload def parse( @@ -259,7 +261,9 @@ else: @overload def parse(source: str | ReadableBuffer, *, mode: Literal["single"]) -> Interactive: ... @overload - def parse(source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: str = ...) -> AST: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = "", mode: str = "exec" + ) -> AST: ... 
if sys.version_info >= (3, 9): def unparse(ast_obj: AST) -> str: ... @@ -268,21 +272,21 @@ def copy_location(new_node: _T, old_node: AST) -> _T: ... if sys.version_info >= (3, 9): def dump( - node: AST, annotate_fields: bool = ..., include_attributes: bool = ..., *, indent: int | str | None = ... + node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None ) -> str: ... else: - def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... + def dump(node: AST, annotate_fields: bool = True, include_attributes: bool = False) -> str: ... def fix_missing_locations(node: _T) -> _T: ... -def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = ...) -> str | None: ... -def increment_lineno(node: _T, n: int = ...) -> _T: ... +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: ... +def increment_lineno(node: _T, n: int = 1) -> _T: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... def literal_eval(node_or_string: str | AST) -> Any: ... if sys.version_info >= (3, 8): - def get_source_segment(source: str, node: AST, *, padded: bool = ...) -> str | None: ... + def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: ... def walk(node: AST) -> Iterator[AST]: ... diff --git a/mypy/typeshed/stdlib/asynchat.pyi b/mypy/typeshed/stdlib/asynchat.pyi index 4d43b02c056c..79a70d1c1ec8 100644 --- a/mypy/typeshed/stdlib/asynchat.pyi +++ b/mypy/typeshed/stdlib/asynchat.pyi @@ -2,7 +2,7 @@ import asyncore from abc import abstractmethod class simple_producer: - def __init__(self, data: bytes, buffer_size: int = ...) -> None: ... + def __init__(self, data: bytes, buffer_size: int = 512) -> None: ... def more(self) -> bytes: ... class async_chat(asyncore.dispatcher): diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index 83576ab6455e..3b8f286710b9 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -34,7 +34,7 @@ class Server(AbstractServer): ssl_context: _SSLContext, backlog: int, ssl_handshake_timeout: float | None, - ssl_shutdown_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = None, ) -> None: ... else: def __init__( @@ -74,28 +74,30 @@ class BaseEventLoop(AbstractEventLoop): def close(self) -> None: ... async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. - def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... def call_later( - self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = ... + self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> TimerHandle: ... + def call_at( + self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = None ) -> TimerHandle: ... - def call_at(self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> TimerHandle: ... def time(self) -> float: ... # Future methods def create_future(self) -> Future[Any]: ... 
# Tasks methods if sys.version_info >= (3, 11): def create_task( - self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = ..., context: Context | None = ... + self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = None, context: Context | None = None ) -> Task[_T]: ... elif sys.version_info >= (3, 8): - def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = ...) -> Task[_T]: ... + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = None) -> Task[_T]: ... else: def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T]) -> Task[_T]: ... def set_task_factory(self, factory: _TaskFactory | None) -> None: ... def get_task_factory(self) -> _TaskFactory | None: ... # Methods for interacting with threads - def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Future[_T]: ... def set_default_executor(self, executor: Any) -> None: ... # Network I/O methods returning Futures. @@ -104,12 +106,12 @@ class BaseEventLoop(AbstractEventLoop): host: bytes | str | None, port: bytes | str | int | None, *, - family: int = ..., - type: int = ..., - proto: int = ..., - flags: int = ..., + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... - async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... if sys.version_info >= (3, 11): @overload async def create_connection( @@ -118,36 +120,36 @@ class BaseEventLoop(AbstractEventLoop): host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... 
@overload async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 8): @overload @@ -157,34 +159,34 @@ class BaseEventLoop(AbstractEventLoop): host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... else: @overload @@ -194,67 +196,67 @@ class BaseEventLoop(AbstractEventLoop): host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... 
@overload async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... async def start_tls( self, @@ -262,54 +264,54 @@ class BaseEventLoop(AbstractEventLoop): protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, ) -> Transport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... 
else: @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... async def start_tls( self, @@ -317,53 +319,53 @@ class BaseEventLoop(AbstractEventLoop): protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, ) -> Transport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... async def sock_sendfile( - self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... + self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True ) -> int: ... async def sendfile( - self, transport: WriteTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True ) -> int: ... if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] self, protocol_factory: Callable[[], _ProtocolT], - local_addr: tuple[str, int] | None = ..., - remote_addr: tuple[str, int] | None = ..., + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, *, - family: int = ..., - proto: int = ..., - flags: int = ..., - reuse_port: bool | None = ..., - allow_broadcast: bool | None = ..., - sock: socket | None = ..., + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, ) -> tuple[DatagramTransport, _ProtocolT]: ... 
else: async def create_datagram_endpoint( self, protocol_factory: Callable[[], _ProtocolT], - local_addr: tuple[str, int] | None = ..., - remote_addr: tuple[str, int] | None = ..., + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, *, - family: int = ..., - proto: int = ..., - flags: int = ..., + family: int = 0, + proto: int = 0, + flags: int = 0, reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - allow_broadcast: bool | None = ..., - sock: socket | None = ..., + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. async def connect_read_pipe( @@ -377,15 +379,15 @@ class BaseEventLoop(AbstractEventLoop): protocol_factory: Callable[[], _ProtocolT], cmd: bytes | str, *, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., - text: Literal[False, None] = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = None, **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... async def subprocess_exec( @@ -393,14 +395,14 @@ class BaseEventLoop(AbstractEventLoop): protocol_factory: Callable[[], _ProtocolT], program: Any, *args: Any, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... def add_reader(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... @@ -416,7 +418,7 @@ class BaseEventLoop(AbstractEventLoop): async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... if sys.version_info >= (3, 11): async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... - async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi b/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi index d3ab16a3edd2..597c8302988e 100644 --- a/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi @@ -9,7 +9,6 @@ from . 
import events, futures, protocols, transports _File: TypeAlias = int | IO[Any] | None class BaseSubprocessTransport(transports.SubprocessTransport): - _closed: bool # undocumented _protocol: protocols.SubprocessProtocol # undocumented _loop: events.AbstractEventLoop # undocumented @@ -30,8 +29,8 @@ class BaseSubprocessTransport(transports.SubprocessTransport): stdout: _File, stderr: _File, bufsize: int, - waiter: futures.Future[Any] | None = ..., - extra: Any | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Any | None = None, **kwargs: Any, ) -> None: ... def _start( diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index 7241d5a29f8d..b2292801ee0d 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import FileDescriptorLike, ReadableBuffer, Self, StrPath, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, Self, StrPath, Unused, WriteableBuffer from abc import ABCMeta, abstractmethod from collections.abc import Awaitable, Callable, Coroutine, Generator, Sequence from contextvars import Context @@ -70,7 +70,7 @@ class Handle: _cancelled: bool _args: Sequence[Any] def __init__( - self, callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, context: Context | None = ... + self, callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, context: Context | None = None ) -> None: ... def cancel(self) -> None: ... def _run(self) -> None: ... @@ -83,7 +83,7 @@ class TimerHandle(Handle): callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, - context: Context | None = ..., + context: Context | None = None, ) -> None: ... def when(self) -> float: ... def __lt__(self, other: TimerHandle) -> bool: ... @@ -96,7 +96,7 @@ class AbstractServer: @abstractmethod def close(self) -> None: ... async def __aenter__(self: Self) -> Self: ... - async def __aexit__(self, *exc: object) -> None: ... + async def __aexit__(self, *exc: Unused) -> None: ... @abstractmethod def get_loop(self) -> AbstractEventLoop: ... @abstractmethod @@ -132,14 +132,14 @@ class AbstractEventLoop: # Methods scheduling callbacks. All these return Handles. if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 @abstractmethod - def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... @abstractmethod def call_later( - self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = ... + self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = None ) -> TimerHandle: ... @abstractmethod def call_at( - self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = ... + self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = None ) -> TimerHandle: ... else: @abstractmethod @@ -161,13 +161,13 @@ class AbstractEventLoop: self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, - name: str | None = ..., - context: Context | None = ..., + name: str | None = None, + context: Context | None = None, ) -> Task[_T]: ... elif sys.version_info >= (3, 8): @abstractmethod def create_task( - self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: str | None = ... 
+ self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: str | None = None ) -> Task[_T]: ... else: @abstractmethod @@ -180,7 +180,7 @@ class AbstractEventLoop: # Methods for interacting with threads if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 @abstractmethod - def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... else: @abstractmethod def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any) -> Handle: ... @@ -196,13 +196,13 @@ class AbstractEventLoop: host: bytes | str | None, port: bytes | str | int | None, *, - family: int = ..., - type: int = ..., - proto: int = ..., - flags: int = ..., + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... @abstractmethod - async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... if sys.version_info >= (3, 11): @overload @abstractmethod @@ -212,37 +212,37 @@ class AbstractEventLoop: host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... 
elif sys.version_info >= (3, 8): @overload @@ -253,35 +253,35 @@ class AbstractEventLoop: host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... else: @overload @@ -292,31 +292,31 @@ class AbstractEventLoop: host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... 
if sys.version_info >= (3, 11): @overload @@ -324,38 +324,38 @@ class AbstractEventLoop: async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @overload @abstractmethod async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @abstractmethod async def start_tls( @@ -364,22 +364,22 @@ class AbstractEventLoop: protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, ) -> Transport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, - path: StrPath | None = ..., + path: StrPath | None = None, *, - sock: socket | None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... else: @overload @@ -387,36 +387,36 @@ class AbstractEventLoop: async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... 
@overload @abstractmethod async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @abstractmethod async def start_tls( @@ -425,20 +425,20 @@ class AbstractEventLoop: protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, ) -> Transport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, - path: StrPath | None = ..., + path: StrPath | None = None, *, - sock: socket | None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... if sys.version_info >= (3, 11): async def connect_accepted_socket( @@ -446,9 +446,9 @@ class AbstractEventLoop: protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 10): async def connect_accepted_socket( @@ -456,55 +456,55 @@ class AbstractEventLoop: protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): async def create_unix_connection( self, protocol_factory: Callable[[], _ProtocolT], - path: str | None = ..., + path: str | None = None, *, - ssl: _SSLContext = ..., - sock: socket | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... else: async def create_unix_connection( self, protocol_factory: Callable[[], _ProtocolT], - path: str | None = ..., + path: str | None = None, *, - ssl: _SSLContext = ..., - sock: socket | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, ) -> tuple[Transport, _ProtocolT]: ... @abstractmethod async def sock_sendfile( - self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... 
+ self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = None ) -> int: ... @abstractmethod async def sendfile( - self, transport: WriteTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True ) -> int: ... @abstractmethod async def create_datagram_endpoint( self, protocol_factory: Callable[[], _ProtocolT], - local_addr: tuple[str, int] | None = ..., - remote_addr: tuple[str, int] | None = ..., + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, *, - family: int = ..., - proto: int = ..., - flags: int = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - allow_broadcast: bool | None = ..., - sock: socket | None = ..., + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. @abstractmethod @@ -521,9 +521,9 @@ class AbstractEventLoop: protocol_factory: Callable[[], _ProtocolT], cmd: bytes | str, *, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, universal_newlines: Literal[False] = ..., shell: Literal[True] = ..., bufsize: Literal[0] = ..., @@ -538,11 +538,11 @@ class AbstractEventLoop: protocol_factory: Callable[[], _ProtocolT], program: Any, *args: Any, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., + shell: Literal[False] = ..., bufsize: Literal[0] = ..., encoding: None = ..., errors: None = ..., @@ -571,7 +571,7 @@ class AbstractEventLoop: @abstractmethod async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... @abstractmethod - async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... @abstractmethod async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. diff --git a/mypy/typeshed/stdlib/asyncio/format_helpers.pyi b/mypy/typeshed/stdlib/asyncio/format_helpers.pyi index 4e2ef8d3f274..1c78dff3948a 100644 --- a/mypy/typeshed/stdlib/asyncio/format_helpers.pyi +++ b/mypy/typeshed/stdlib/asyncio/format_helpers.pyi @@ -16,5 +16,5 @@ def _get_function_source(func: _FuncType) -> tuple[str, int]: ... def _get_function_source(func: object) -> tuple[str, int] | None: ... def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... -def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = ...) -> str: ... -def extract_stack(f: FrameType | None = ..., limit: int | None = ...) -> traceback.StackSummary: ... +def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... 
+def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ... diff --git a/mypy/typeshed/stdlib/asyncio/futures.pyi b/mypy/typeshed/stdlib/asyncio/futures.pyi index f917bd5dee98..f325272d2403 100644 --- a/mypy/typeshed/stdlib/asyncio/futures.pyi +++ b/mypy/typeshed/stdlib/asyncio/futures.pyi @@ -44,9 +44,9 @@ class Future(Awaitable[_T], Iterable[_T]): def get_loop(self) -> AbstractEventLoop: ... @property def _callbacks(self: Self) -> list[tuple[Callable[[Self], Any], Context]]: ... - def add_done_callback(self: Self, __fn: Callable[[Self], object], *, context: Context | None = ...) -> None: ... + def add_done_callback(self: Self, __fn: Callable[[Self], object], *, context: Context | None = None) -> None: ... if sys.version_info >= (3, 9): - def cancel(self, msg: Any | None = ...) -> bool: ... + def cancel(self, msg: Any | None = None) -> bool: ... else: def cancel(self) -> bool: ... @@ -64,4 +64,4 @@ class Future(Awaitable[_T], Iterable[_T]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = ...) -> Future[_T]: ... +def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/locks.pyi b/mypy/typeshed/stdlib/asyncio/locks.pyi index a5cdf9aa1184..87bcaa2110db 100644 --- a/mypy/typeshed/stdlib/asyncio/locks.pyi +++ b/mypy/typeshed/stdlib/asyncio/locks.pyi @@ -1,6 +1,6 @@ import enum import sys -from _typeshed import Self +from _typeshed import Self, Unused from collections import deque from collections.abc import Callable, Generator from types import TracebackType @@ -31,7 +31,7 @@ else: class _ContextManager: def __init__(self, lock: Lock | Semaphore) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... class _ContextManagerMixin: # Apparently this exists to *prohibit* use as a context manager. @@ -45,20 +45,20 @@ else: ) -> None: ... class Lock(_ContextManagerMixin): - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: - def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... class Event: - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: - def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... @@ -66,26 +66,26 @@ class Event: async def wait(self) -> Literal[True]: ... class Condition(_ContextManagerMixin): - if sys.version_info >= (3, 11): - def __init__(self, lock: Lock | None = ...) -> None: ... + if sys.version_info >= (3, 10): + def __init__(self, lock: Lock | None = None) -> None: ... else: - def __init__(self, lock: Lock | None = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, lock: Lock | None = None, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... async def wait(self) -> Literal[True]: ... 
async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... - def notify(self, n: int = ...) -> None: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... class Semaphore(_ContextManagerMixin): _value: int _waiters: deque[Future[Any]] - if sys.version_info >= (3, 11): - def __init__(self, value: int = ...) -> None: ... + if sys.version_info >= (3, 10): + def __init__(self, value: int = 1) -> None: ... else: - def __init__(self, value: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... @@ -104,7 +104,7 @@ if sys.version_info >= (3, 11): class Barrier(_LoopBoundMixin): def __init__(self, parties: int) -> None: ... async def __aenter__(self: Self) -> Self: ... - async def __aexit__(self, *args: object) -> None: ... + async def __aexit__(self, *args: Unused) -> None: ... async def wait(self) -> int: ... async def abort(self) -> None: ... async def reset(self) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/proactor_events.pyi b/mypy/typeshed/stdlib/asyncio/proactor_events.pyi index 704939450cc5..33fdf84ade4a 100644 --- a/mypy/typeshed/stdlib/asyncio/proactor_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/proactor_events.pyi @@ -20,9 +20,9 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTr loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, ) -> None: ... if sys.version_info >= (3, 8): def __del__(self, _warn: _WarnCallbackProtocol = ...) -> None: ... @@ -36,10 +36,10 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTran loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., - buffer_size: int = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + buffer_size: int = 65536, ) -> None: ... else: def __init__( @@ -47,9 +47,9 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTran loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, ) -> None: ... class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ... @@ -57,16 +57,15 @@ class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... 
class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): - _sendfile_compatible: ClassVar[constants._SendfileMode] def __init__( self, loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, ) -> None: ... def _set_extra(self, sock: socket) -> None: ... def can_write_eof(self) -> Literal[True]: ... diff --git a/mypy/typeshed/stdlib/asyncio/queues.pyi b/mypy/typeshed/stdlib/asyncio/queues.pyi index 90ba39aebb96..f56a09524e71 100644 --- a/mypy/typeshed/stdlib/asyncio/queues.pyi +++ b/mypy/typeshed/stdlib/asyncio/queues.pyi @@ -13,10 +13,10 @@ class QueueFull(Exception): ... _T = TypeVar("_T") class Queue(Generic[_T]): - if sys.version_info >= (3, 11): - def __init__(self, maxsize: int = ...) -> None: ... + if sys.version_info >= (3, 10): + def __init__(self, maxsize: int = 0) -> None: ... else: - def __init__(self, maxsize: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, maxsize: int = 0, *, loop: AbstractEventLoop | None = None) -> None: ... def _init(self, maxsize: int) -> None: ... def _get(self) -> _T: ... diff --git a/mypy/typeshed/stdlib/asyncio/runners.pyi b/mypy/typeshed/stdlib/asyncio/runners.pyi index 74ed83ed8dc4..484f9eb831a1 100644 --- a/mypy/typeshed/stdlib/asyncio/runners.pyi +++ b/mypy/typeshed/stdlib/asyncio/runners.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self +from _typeshed import Self, Unused from collections.abc import Callable, Coroutine from contextvars import Context from typing import Any, TypeVar @@ -16,12 +16,12 @@ _T = TypeVar("_T") if sys.version_info >= (3, 11): @final class Runner: - def __init__(self, *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ...) -> None: ... + def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, exc_type: object, exc_val: object, exc_tb: object) -> None: ... + def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... def close(self) -> None: ... def get_loop(self) -> AbstractEventLoop: ... - def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = ...) -> _T: ... + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... if sys.version_info >= (3, 12): def run( @@ -29,7 +29,7 @@ if sys.version_info >= (3, 12): ) -> _T: ... elif sys.version_info >= (3, 8): - def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = ...) -> _T: ... + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... else: - def run(main: Coroutine[Any, Any, _T], *, debug: bool = ...) -> _T: ... + def run(main: Coroutine[Any, Any, _T], *, debug: bool = False) -> _T: ... diff --git a/mypy/typeshed/stdlib/asyncio/selector_events.pyi b/mypy/typeshed/stdlib/asyncio/selector_events.pyi index c5468d4d72c7..430f2dd405cd 100644 --- a/mypy/typeshed/stdlib/asyncio/selector_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/selector_events.pyi @@ -5,4 +5,4 @@ from . 
import base_events __all__ = ("BaseSelectorEventLoop",) class BaseSelectorEventLoop(base_events.BaseEventLoop): - def __init__(self, selector: selectors.BaseSelector | None = ...) -> None: ... + def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/sslproto.pyi b/mypy/typeshed/stdlib/asyncio/sslproto.pyi index 3c1c7b2e4edb..aadc7d32b40f 100644 --- a/mypy/typeshed/stdlib/asyncio/sslproto.pyi +++ b/mypy/typeshed/stdlib/asyncio/sslproto.pyi @@ -35,7 +35,6 @@ else: if sys.version_info < (3, 11): class _SSLPipe: - max_size: ClassVar[int] _context: ssl.SSLContext @@ -48,7 +47,7 @@ if sys.version_info < (3, 11): _need_ssldata: bool _handshake_cb: Callable[[BaseException | None], None] | None _shutdown_cb: Callable[[], None] | None - def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = ...) -> None: ... + def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ... @property def context(self) -> ssl.SSLContext: ... @property @@ -57,21 +56,20 @@ if sys.version_info < (3, 11): def need_ssldata(self) -> bool: ... @property def wrapped(self) -> bool: ... - def do_handshake(self, callback: Callable[[BaseException | None], object] | None = ...) -> list[bytes]: ... - def shutdown(self, callback: Callable[[], object] | None = ...) -> list[bytes]: ... + def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ... + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... def feed_eof(self) -> None: ... - def feed_ssldata(self, data: bytes, only_handshake: bool = ...) -> tuple[list[bytes], list[bytes]]: ... - def feed_appdata(self, data: bytes, offset: int = ...) -> tuple[list[bytes], int]: ... + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ... + def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): - _sendfile_compatible: ClassVar[constants._SendfileMode] _loop: events.AbstractEventLoop _ssl_protocol: SSLProtocol _closed: bool def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... - def get_extra_info(self, name: str, default: Any | None = ...) -> dict[str, Any]: ... + def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... @property def _protocol_paused(self) -> bool: ... def write(self, data: bytes | bytearray | memoryview) -> None: ... @@ -79,7 +77,7 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... def get_read_buffer_limits(self) -> tuple[int, int]: ... - def set_read_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... + def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... def get_read_buffer_size(self) -> int: ... 
if sys.version_info >= (3, 11): @@ -118,11 +116,11 @@ class SSLProtocol(_SSLProtocolBase): app_protocol: protocols.BaseProtocol, sslcontext: ssl.SSLContext, waiter: futures.Future[Any], - server_side: bool = ..., - server_hostname: str | None = ..., - call_connection_made: bool = ..., - ssl_handshake_timeout: int | None = ..., - ssl_shutdown_timeout: float | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ssl_shutdown_timeout: float | None = None, ) -> None: ... else: def __init__( @@ -131,17 +129,17 @@ class SSLProtocol(_SSLProtocolBase): app_protocol: protocols.BaseProtocol, sslcontext: ssl.SSLContext, waiter: futures.Future[Any], - server_side: bool = ..., - server_hostname: str | None = ..., - call_connection_made: bool = ..., - ssl_handshake_timeout: int | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, ) -> None: ... def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... - def _wakeup_waiter(self, exc: BaseException | None = ...) -> None: ... + def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ... def connection_lost(self, exc: BaseException | None) -> None: ... def eof_received(self) -> None: ... - def _get_extra_info(self, name: str, default: Any | None = ...) -> Any: ... + def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... def _start_shutdown(self) -> None: ... if sys.version_info >= (3, 11): def _write_appdata(self, list_of_data: list[bytes]) -> None: ... @@ -151,7 +149,7 @@ class SSLProtocol(_SSLProtocolBase): def _start_handshake(self) -> None: ... def _check_handshake_timeout(self) -> None: ... def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ... - def _fatal_error(self, exc: BaseException, message: str = ...) -> None: ... + def _fatal_error(self, exc: BaseException, message: str = "Fatal error on transport") -> None: ... def _abort(self) -> None: ... if sys.version_info >= (3, 11): def get_buffer(self, n: int) -> memoryview: ... diff --git a/mypy/typeshed/stdlib/asyncio/staggered.pyi b/mypy/typeshed/stdlib/asyncio/staggered.pyi index 610d6f70b614..3324777f4168 100644 --- a/mypy/typeshed/stdlib/asyncio/staggered.pyi +++ b/mypy/typeshed/stdlib/asyncio/staggered.pyi @@ -6,5 +6,5 @@ from . import events __all__ = ("staggered_race",) async def staggered_race( - coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = ... + coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None ) -> tuple[Any, int | None, list[Exception | None]]: ... diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi index 00d95d93f2ff..2468f482291c 100644 --- a/mypy/typeshed/stdlib/asyncio/streams.pyi +++ b/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -59,40 +59,40 @@ if sys.version_info < (3, 8): if sys.version_info >= (3, 10): async def open_connection( - host: str | None = ..., - port: int | str | None = ..., + host: str | None = None, + port: int | str | None = None, *, - limit: int = ..., + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> tuple[StreamReader, StreamWriter]: ... 
async def start_server( client_connected_cb: _ClientConnectedCallback, - host: str | Sequence[str] | None = ..., - port: int | str | None = ..., + host: str | Sequence[str] | None = None, + port: int | str | None = None, *, - limit: int = ..., + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> Server: ... else: async def open_connection( - host: str | None = ..., - port: int | str | None = ..., + host: str | None = None, + port: int | str | None = None, *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> tuple[StreamReader, StreamWriter]: ... async def start_server( client_connected_cb: _ClientConnectedCallback, - host: str | None = ..., - port: int | str | None = ..., + host: str | None = None, + port: int | str | None = None, *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> Server: ... @@ -100,33 +100,33 @@ else: if sys.platform != "win32": if sys.version_info >= (3, 10): async def open_unix_connection( - path: StrPath | None = ..., *, limit: int = ..., **kwds: Any + path: StrPath | None = None, *, limit: int = 65536, **kwds: Any ) -> tuple[StreamReader, StreamWriter]: ... async def start_unix_server( - client_connected_cb: _ClientConnectedCallback, path: StrPath | None = ..., *, limit: int = ..., **kwds: Any + client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any ) -> Server: ... else: async def open_unix_connection( - path: StrPath | None = ..., *, loop: events.AbstractEventLoop | None = ..., limit: int = ..., **kwds: Any + path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any ) -> tuple[StreamReader, StreamWriter]: ... async def start_unix_server( client_connected_cb: _ClientConnectedCallback, - path: StrPath | None = ..., + path: StrPath | None = None, *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, **kwds: Any, ) -> Server: ... class FlowControlMixin(protocols.Protocol): - def __init__(self, loop: events.AbstractEventLoop | None = ...) -> None: ... + def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): def __init__( self, stream_reader: StreamReader, - client_connected_cb: _ClientConnectedCallback | None = ..., - loop: events.AbstractEventLoop | None = ..., + client_connected_cb: _ClientConnectedCallback | None = None, + loop: events.AbstractEventLoop | None = None, ) -> None: ... class StreamWriter: @@ -146,15 +146,15 @@ class StreamWriter: def close(self) -> None: ... def is_closing(self) -> bool: ... async def wait_closed(self) -> None: ... - def get_extra_info(self, name: str, default: Any = ...) -> Any: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... async def drain(self) -> None: ... if sys.version_info >= (3, 11): async def start_tls( - self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ... + self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None ) -> None: ... 
class StreamReader(AsyncIterator[bytes]): - def __init__(self, limit: int = ..., loop: events.AbstractEventLoop | None = ...) -> None: ... + def __init__(self, limit: int = 65536, loop: events.AbstractEventLoop | None = None) -> None: ... def exception(self) -> Exception: ... def set_exception(self, exc: Exception) -> None: ... def set_transport(self, transport: transports.BaseTransport) -> None: ... @@ -163,8 +163,8 @@ class StreamReader(AsyncIterator[bytes]): def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... async def readline(self) -> bytes: ... # Can be any buffer that supports len(); consider changing to a Protocol if PEP 688 is accepted - async def readuntil(self, separator: bytes | bytearray | memoryview = ...) -> bytes: ... - async def read(self, n: int = ...) -> bytes: ... + async def readuntil(self, separator: bytes | bytearray | memoryview = b"\n") -> bytes: ... + async def read(self, n: int = -1) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... def __aiter__(self: Self) -> Self: ... async def __anext__(self) -> bytes: ... diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi index d483f57551b0..b112a9d80a32 100644 --- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi @@ -38,15 +38,15 @@ class Process: def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... - async def communicate(self, input: bytes | bytearray | memoryview | None = ...) -> tuple[bytes, bytes]: ... + async def communicate(self, input: bytes | bytearray | memoryview | None = None) -> tuple[bytes, bytes]: ... if sys.version_info >= (3, 11): async def create_subprocess_shell( cmd: str | bytes, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell universal_newlines: Literal[False] = ..., @@ -76,10 +76,10 @@ if sys.version_info >= (3, 11): async def create_subprocess_exec( program: _ExecArg, *args: _ExecArg, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell universal_newlines: Literal[False] = ..., shell: Literal[True] = ..., @@ -109,10 +109,10 @@ if sys.version_info >= (3, 11): elif sys.version_info >= (3, 10): async def create_subprocess_shell( cmd: str | bytes, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell universal_newlines: Literal[False] = ..., @@ -141,10 +141,10 @@ elif sys.version_info >= (3, 10): async def create_subprocess_exec( program: _ExecArg, *args: _ExecArg, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: 
int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell universal_newlines: Literal[False] = ..., shell: Literal[True] = ..., @@ -173,11 +173,11 @@ elif sys.version_info >= (3, 10): else: # >= 3.9 async def create_subprocess_shell( cmd: str | bytes, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell universal_newlines: Literal[False] = ..., @@ -205,11 +205,11 @@ else: # >= 3.9 async def create_subprocess_exec( program: _ExecArg, *args: _ExecArg, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell universal_newlines: Literal[False] = ..., shell: Literal[True] = ..., diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi index 0d508c97c1f9..9e6c6e047368 100644 --- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi @@ -16,5 +16,5 @@ class TaskGroup: async def __aenter__(self: Self) -> Self: ... async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def create_task( - self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ..., context: Context | None = ... + self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None, context: Context | None = None ) -> Task[_T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index 43dd020fa99d..0a44255a3ac8 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -51,17 +51,17 @@ FIRST_EXCEPTION = concurrent.futures.FIRST_EXCEPTION ALL_COMPLETED = concurrent.futures.ALL_COMPLETED if sys.version_info >= (3, 10): - def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = ...) -> Iterator[Future[_T]]: ... + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ... else: def as_completed( - fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ... + fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = None, timeout: float | None = None ) -> Iterator[Future[_T]]: ... @overload -def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = ...) -> _FT: ... # type: ignore[misc] +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[misc] @overload -def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = ...) -> Task[_T]: ... +def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... 
# `gather()` actually returns a list with length equal to the number # of tasks passed; however, Tuple is used similar to the annotation for @@ -72,10 +72,10 @@ def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | No # but having overlapping overloads is the only way to get acceptable type inference in all edge cases. if sys.version_info >= (3, 10): @overload - def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = ...) -> Future[tuple[_T1]]: ... # type: ignore[misc] + def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... # type: ignore[misc] @overload def gather( # type: ignore[misc] - __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: Literal[False] = ... + __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: Literal[False] = False ) -> Future[tuple[_T1, _T2]]: ... @overload def gather( # type: ignore[misc] @@ -83,7 +83,7 @@ if sys.version_info >= (3, 10): __coro_or_future2: _FutureLike[_T2], __coro_or_future3: _FutureLike[_T3], *, - return_exceptions: Literal[False] = ..., + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3]]: ... @overload def gather( # type: ignore[misc] @@ -92,7 +92,7 @@ if sys.version_info >= (3, 10): __coro_or_future3: _FutureLike[_T3], __coro_or_future4: _FutureLike[_T4], *, - return_exceptions: Literal[False] = ..., + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather( # type: ignore[misc] @@ -102,7 +102,7 @@ if sys.version_info >= (3, 10): __coro_or_future4: _FutureLike[_T4], __coro_or_future5: _FutureLike[_T5], *, - return_exceptions: Literal[False] = ..., + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ... # type: ignore[misc] @@ -140,20 +140,20 @@ if sys.version_info >= (3, 10): tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] ]: ... @overload - def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = ...) -> Future[list[Any]]: ... # type: ignore[misc] + def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = False) -> Future[list[Any]]: ... # type: ignore[misc] else: @overload def gather( # type: ignore[misc] - __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ... + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False ) -> Future[tuple[_T1]]: ... @overload def gather( # type: ignore[misc] __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2]]: ... @overload def gather( # type: ignore[misc] @@ -161,8 +161,8 @@ else: __coro_or_future2: _FutureLike[_T2], __coro_or_future3: _FutureLike[_T3], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3]]: ... 
@overload def gather( # type: ignore[misc] @@ -171,8 +171,8 @@ else: __coro_or_future3: _FutureLike[_T3], __coro_or_future4: _FutureLike[_T4], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather( # type: ignore[misc] @@ -182,19 +182,19 @@ else: __coro_or_future4: _FutureLike[_T4], __coro_or_future5: _FutureLike[_T5], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def gather( # type: ignore[misc] - __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: bool ) -> Future[tuple[_T1 | BaseException]]: ... @overload def gather( # type: ignore[misc] __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... @overload @@ -203,7 +203,7 @@ else: __coro_or_future2: _FutureLike[_T2], __coro_or_future3: _FutureLike[_T3], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... @overload @@ -213,7 +213,7 @@ else: __coro_or_future3: _FutureLike[_T3], __coro_or_future4: _FutureLike[_T4], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... @overload @@ -224,14 +224,14 @@ else: __coro_or_future4: _FutureLike[_T4], __coro_or_future5: _FutureLike[_T5], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[ tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] ]: ... @overload def gather( # type: ignore[misc] - *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = ..., return_exceptions: bool = ... + *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = None, return_exceptions: bool = False ) -> Future[list[Any]]: ... def run_coroutine_threadsafe(coro: _FutureLike[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... @@ -243,28 +243,36 @@ if sys.version_info >= (3, 10): @overload async def sleep(delay: float, result: _T) -> _T: ... @overload - async def wait(fs: Iterable[_FT], *, timeout: float | None = ..., return_when: str = ...) -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] + async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] @overload async def wait( - fs: Iterable[Awaitable[_T]], *, timeout: float | None = ..., return_when: str = ... + fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... else: - def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = ...) 
-> Future[_T]: ... + def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... @overload - async def sleep(delay: float, *, loop: AbstractEventLoop | None = ...) -> None: ... + async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... @overload - async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = ...) -> _T: ... + async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... @overload async def wait( # type: ignore[misc] - fs: Iterable[_FT], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ... + fs: Iterable[_FT], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", ) -> tuple[set[_FT], set[_FT]]: ... @overload async def wait( - fs: Iterable[Awaitable[_T]], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ... + fs: Iterable[Awaitable[_T]], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... - async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = ...) -> _T: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... # mypy and pyright complain that a subclass of an invariant class shouldn't be covariant. # While this is true in general, here it's sort-of okay to have a covariant subclass, @@ -288,33 +296,33 @@ class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: def get_name(self) -> str: ... def set_name(self, __value: object) -> None: ... - def get_stack(self, *, limit: int | None = ...) -> list[FrameType]: ... - def print_stack(self, *, limit: int | None = ..., file: TextIO | None = ...) -> None: ... + def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... + def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... if sys.version_info >= (3, 11): def cancelling(self) -> int: ... def uncancel(self) -> int: ... if sys.version_info < (3, 9): @classmethod - def current_task(cls, loop: AbstractEventLoop | None = ...) -> Task[Any] | None: ... + def current_task(cls, loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... @classmethod - def all_tasks(cls, loop: AbstractEventLoop | None = ...) -> set[Task[Any]]: ... + def all_tasks(cls, loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -def all_tasks(loop: AbstractEventLoop | None = ...) -> set[Task[Any]]: ... +def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... if sys.version_info >= (3, 11): def create_task( - coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ..., context: Context | None = ... + coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None, context: Context | None = None ) -> Task[_T]: ... elif sys.version_info >= (3, 8): - def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ...) -> Task[_T]: ... + def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None) -> Task[_T]: ... else: def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T]) -> Task[_T]: ... 
-def current_task(loop: AbstractEventLoop | None = ...) -> Task[Any] | None: ... +def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... def _register_task(task: Task[Any]) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/transports.pyi b/mypy/typeshed/stdlib/asyncio/transports.pyi index 893292dd12b6..531f77672438 100644 --- a/mypy/typeshed/stdlib/asyncio/transports.pyi +++ b/mypy/typeshed/stdlib/asyncio/transports.pyi @@ -7,8 +7,8 @@ from typing import Any __all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") class BaseTransport: - def __init__(self, extra: Mapping[str, Any] | None = ...) -> None: ... - def get_extra_info(self, name: str, default: Any = ...) -> Any: ... + def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... def is_closing(self) -> bool: ... def close(self) -> None: ... def set_protocol(self, protocol: BaseProtocol) -> None: ... @@ -20,7 +20,7 @@ class ReadTransport(BaseTransport): def resume_reading(self) -> None: ... class WriteTransport(BaseTransport): - def set_write_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... + def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... def get_write_buffer_size(self) -> int: ... def get_write_buffer_limits(self) -> tuple[int, int]: ... def write(self, data: bytes | bytearray | memoryview) -> None: ... @@ -32,7 +32,7 @@ class WriteTransport(BaseTransport): class Transport(ReadTransport, WriteTransport): ... class DatagramTransport(BaseTransport): - def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = ...) -> None: ... + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ... def abort(self) -> None: ... class SubprocessTransport(BaseTransport): @@ -44,4 +44,4 @@ class SubprocessTransport(BaseTransport): def kill(self) -> None: ... class _FlowControlMixin(Transport): - def __init__(self, extra: Mapping[str, Any] | None = ..., loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/unix_events.pyi b/mypy/typeshed/stdlib/asyncio/unix_events.pyi index 19dd3ca43b95..5e2b05f57ef1 100644 --- a/mypy/typeshed/stdlib/asyncio/unix_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/unix_events.pyi @@ -85,7 +85,6 @@ if sys.platform != "win32": DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy if sys.version_info >= (3, 8): - from typing import Protocol class _Warn(Protocol): diff --git a/mypy/typeshed/stdlib/asyncio/windows_events.pyi b/mypy/typeshed/stdlib/asyncio/windows_events.pyi index dca06ea33b13..2942a25c0ac4 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_events.pyi @@ -33,7 +33,7 @@ if sys.platform == "win32": class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... class ProactorEventLoop(proactor_events.BaseProactorEventLoop): - def __init__(self, proactor: IocpProactor | None = ...) -> None: ... + def __init__(self, proactor: IocpProactor | None = None) -> None: ... 
async def create_pipe_connection( self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str ) -> tuple[proactor_events._ProactorDuplexPipeTransport, streams.StreamReaderProtocol]: ... @@ -42,13 +42,13 @@ if sys.platform == "win32": ) -> list[PipeServer]: ... class IocpProactor: - def __init__(self, concurrency: int = ...) -> None: ... + def __init__(self, concurrency: int = 0xFFFFFFFF) -> None: ... def __del__(self) -> None: ... def set_loop(self, loop: events.AbstractEventLoop) -> None: ... - def select(self, timeout: int | None = ...) -> list[futures.Future[Any]]: ... - def recv(self, conn: socket.socket, nbytes: int, flags: int = ...) -> futures.Future[bytes]: ... - def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... - def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... + def select(self, timeout: int | None = None) -> list[futures.Future[Any]]: ... + def recv(self, conn: socket.socket, nbytes: int, flags: int = 0) -> futures.Future[bytes]: ... + def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... + def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... def accept(self, listener: socket.socket) -> futures.Future[Any]: ... def connect( self, @@ -58,7 +58,7 @@ if sys.platform == "win32": def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... - def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = ...) -> bool: ... + def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ... def close(self) -> None: ... SelectorEventLoop = _WindowsSelectorEventLoop diff --git a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi index 6e170dcb073a..6ac4e0d89aa4 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi @@ -16,7 +16,7 @@ if sys.platform == "win32": BUFSIZE: Literal[8192] PIPE = subprocess.PIPE STDOUT = subprocess.STDOUT - def pipe(*, duplex: bool = ..., overlapped: tuple[bool, bool] = ..., bufsize: int = ...) -> tuple[int, int]: ... + def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = ..., bufsize: int = 8192) -> tuple[int, int]: ... class PipeHandle: def __init__(self, handle: int) -> None: ... @@ -51,8 +51,8 @@ if sys.platform == "win32": def __init__( self, args: subprocess._CMD, - stdin: subprocess._FILE | None = ..., - stdout: subprocess._FILE | None = ..., - stderr: subprocess._FILE | None = ..., + stdin: subprocess._FILE | None = None, + stdout: subprocess._FILE | None = None, + stderr: subprocess._FILE | None = None, **kwds: Any, ) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncore.pyi b/mypy/typeshed/stdlib/asyncore.pyi index 565deb4d1cad..47c8e2207022 100644 --- a/mypy/typeshed/stdlib/asyncore.pyi +++ b/mypy/typeshed/stdlib/asyncore.pyi @@ -15,17 +15,16 @@ class ExitNow(Exception): ... def read(obj: Any) -> None: ... def write(obj: Any) -> None: ... def readwrite(obj: Any, flags: int) -> None: ... -def poll(timeout: float = ..., map: _MapType | None = ...) -> None: ... -def poll2(timeout: float = ..., map: _MapType | None = ...) -> None: ... 
+def poll(timeout: float = 0.0, map: _MapType | None = None) -> None: ... +def poll2(timeout: float = 0.0, map: _MapType | None = None) -> None: ... poll3 = poll2 -def loop(timeout: float = ..., use_poll: bool = ..., map: _MapType | None = ..., count: int | None = ...) -> None: ... +def loop(timeout: float = 30.0, use_poll: bool = False, map: _MapType | None = None, count: int | None = None) -> None: ... # Not really subclass of socket.socket; it's only delegation. # It is not covariant to it. class dispatcher: - debug: bool connected: bool accepting: bool @@ -33,11 +32,11 @@ class dispatcher: closing: bool ignore_log_types: frozenset[str] socket: _Socket | None - def __init__(self, sock: _Socket | None = ..., map: _MapType | None = ...) -> None: ... - def add_channel(self, map: _MapType | None = ...) -> None: ... - def del_channel(self, map: _MapType | None = ...) -> None: ... + def __init__(self, sock: _Socket | None = None, map: _MapType | None = None) -> None: ... + def add_channel(self, map: _MapType | None = None) -> None: ... + def del_channel(self, map: _MapType | None = None) -> None: ... def create_socket(self, family: int = ..., type: int = ...) -> None: ... - def set_socket(self, sock: _Socket, map: _MapType | None = ...) -> None: ... + def set_socket(self, sock: _Socket, map: _MapType | None = None) -> None: ... def set_reuse_addr(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... @@ -49,7 +48,7 @@ class dispatcher: def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... def log(self, message: Any) -> None: ... - def log_info(self, message: Any, type: str = ...) -> None: ... + def log_info(self, message: Any, type: str = "info") -> None: ... def handle_read_event(self) -> None: ... def handle_connect_event(self) -> None: ... def handle_write_event(self) -> None: ... @@ -68,7 +67,7 @@ class dispatcher_with_send(dispatcher): # def send(self, data: bytes) -> int | None: ... def compact_traceback() -> tuple[tuple[str, str, str], type, type, str]: ... -def close_all(map: _MapType | None = ..., ignore_all: bool = ...) -> None: ... +def close_all(map: _MapType | None = None, ignore_all: bool = False) -> None: ... if sys.platform != "win32": class file_wrapper: @@ -77,7 +76,7 @@ if sys.platform != "win32": def recv(self, bufsize: int, flags: int = ...) -> bytes: ... def send(self, data: bytes, flags: int = ...) -> int: ... @overload - def getsockopt(self, level: int, optname: int, buflen: None = ...) -> int: ... + def getsockopt(self, level: int, optname: int, buflen: None = None) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... def read(self, bufsize: int, flags: int = ...) -> bytes: ... @@ -86,5 +85,5 @@ if sys.platform != "win32": def fileno(self) -> int: ... class file_dispatcher(dispatcher): - def __init__(self, fd: FileDescriptorLike, map: _MapType | None = ...) -> None: ... + def __init__(self, fd: FileDescriptorLike, map: _MapType | None = None) -> None: ... def set_file(self, fd: int) -> None: ... diff --git a/mypy/typeshed/stdlib/audioop.pyi b/mypy/typeshed/stdlib/audioop.pyi index 62b54ced9127..b5934516e40f 100644 --- a/mypy/typeshed/stdlib/audioop.pyi +++ b/mypy/typeshed/stdlib/audioop.pyi @@ -32,8 +32,8 @@ def ratecv( __inrate: int, __outrate: int, __state: _RatecvState | None, - __weightA: int = ..., - __weightB: int = ..., + __weightA: int = 1, + __weightB: int = 0, ) -> tuple[bytes, _RatecvState]: ... def reverse(__fragment: bytes, __width: int) -> bytes: ... 
def rms(__fragment: bytes, __width: int) -> int: ... diff --git a/mypy/typeshed/stdlib/base64.pyi b/mypy/typeshed/stdlib/base64.pyi index 816622eeb071..24830cbfba04 100644 --- a/mypy/typeshed/stdlib/base64.pyi +++ b/mypy/typeshed/stdlib/base64.pyi @@ -26,26 +26,28 @@ __all__ = [ if sys.version_info >= (3, 10): __all__ += ["b32hexencode", "b32hexdecode"] -def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = ...) -> bytes: ... -def b64decode(s: str | ReadableBuffer, altchars: ReadableBuffer | None = ..., validate: bool = ...) -> bytes: ... +def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: ... +def b64decode(s: str | ReadableBuffer, altchars: ReadableBuffer | None = None, validate: bool = False) -> bytes: ... def standard_b64encode(s: ReadableBuffer) -> bytes: ... def standard_b64decode(s: str | ReadableBuffer) -> bytes: ... def urlsafe_b64encode(s: ReadableBuffer) -> bytes: ... def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: ... def b32encode(s: ReadableBuffer) -> bytes: ... -def b32decode(s: str | ReadableBuffer, casefold: bool = ..., map01: bytes | None = ...) -> bytes: ... +def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: bytes | None = None) -> bytes: ... def b16encode(s: ReadableBuffer) -> bytes: ... -def b16decode(s: str | ReadableBuffer, casefold: bool = ...) -> bytes: ... +def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... if sys.version_info >= (3, 10): def b32hexencode(s: ReadableBuffer) -> bytes: ... - def b32hexdecode(s: str | ReadableBuffer, casefold: bool = ...) -> bytes: ... + def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... -def a85encode(b: ReadableBuffer, *, foldspaces: bool = ..., wrapcol: int = ..., pad: bool = ..., adobe: bool = ...) -> bytes: ... +def a85encode( + b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False +) -> bytes: ... def a85decode( - b: str | ReadableBuffer, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: bytearray | bytes = ... + b: str | ReadableBuffer, *, foldspaces: bool = False, adobe: bool = False, ignorechars: bytearray | bytes = b" \t\n\r\x0b" ) -> bytes: ... -def b85encode(b: ReadableBuffer, pad: bool = ...) -> bytes: ... +def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: ... def b85decode(b: str | ReadableBuffer) -> bytes: ... def decode(input: IO[bytes], output: IO[bytes]) -> None: ... def encode(input: IO[bytes], output: IO[bytes]) -> None: ... diff --git a/mypy/typeshed/stdlib/bdb.pyi b/mypy/typeshed/stdlib/bdb.pyi index 58808632b31d..2a1fdddff7e9 100644 --- a/mypy/typeshed/stdlib/bdb.pyi +++ b/mypy/typeshed/stdlib/bdb.pyi @@ -24,7 +24,7 @@ class Bdb: stopframe: FrameType | None returnframe: FrameType | None stoplineno: int - def __init__(self, skip: Iterable[str] | None = ...) -> None: ... + def __init__(self, skip: Iterable[str] | None = None) -> None: ... def canonic(self, filename: str) -> str: ... def reset(self) -> None: ... def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: ... @@ -41,15 +41,15 @@ class Bdb: def user_line(self, frame: FrameType) -> None: ... def user_return(self, frame: FrameType, return_value: Any) -> None: ... def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: ... - def set_until(self, frame: FrameType, lineno: int | None = ...) -> None: ... + def set_until(self, frame: FrameType, lineno: int | None = None) -> None: ... def set_step(self) -> None: ... 
def set_next(self, frame: FrameType) -> None: ... def set_return(self, frame: FrameType) -> None: ... - def set_trace(self, frame: FrameType | None = ...) -> None: ... + def set_trace(self, frame: FrameType | None = None) -> None: ... def set_continue(self) -> None: ... def set_quit(self) -> None: ... def set_break( - self, filename: str, lineno: int, temporary: bool = ..., cond: str | None = ..., funcname: str | None = ... + self, filename: str, lineno: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None ) -> None: ... def clear_break(self, filename: str, lineno: int) -> None: ... def clear_bpbynumber(self, arg: SupportsInt) -> None: ... @@ -61,14 +61,15 @@ class Bdb: def get_file_breaks(self, filename: str) -> list[Breakpoint]: ... def get_all_breaks(self) -> list[Breakpoint]: ... def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: ... - def format_stack_entry(self, frame_lineno: int, lprefix: str = ...) -> str: ... - def run(self, cmd: str | CodeType, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... - def runeval(self, expr: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... + def format_stack_entry(self, frame_lineno: int, lprefix: str = ": ") -> str: ... + def run( + self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None + ) -> None: ... + def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... class Breakpoint: - next: int bplist: dict[tuple[str, int], list[Breakpoint]] bpbynumber: list[Breakpoint | None] @@ -84,7 +85,7 @@ class Breakpoint: hits: int number: int def __init__( - self, file: str, line: int, temporary: bool = ..., cond: str | None = ..., funcname: str | None = ... + self, file: str, line: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None ) -> None: ... if sys.version_info >= (3, 11): @staticmethod @@ -93,7 +94,7 @@ class Breakpoint: def deleteMe(self) -> None: ... def enable(self) -> None: ... def disable(self) -> None: ... - def bpprint(self, out: IO[str] | None = ...) -> None: ... + def bpprint(self, out: IO[str] | None = None) -> None: ... def bpformat(self) -> str: ... def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... diff --git a/mypy/typeshed/stdlib/binascii.pyi b/mypy/typeshed/stdlib/binascii.pyi index 6f834f7868c3..759b6c39399a 100644 --- a/mypy/typeshed/stdlib/binascii.pyi +++ b/mypy/typeshed/stdlib/binascii.pyi @@ -7,17 +7,17 @@ from typing_extensions import TypeAlias _AsciiBuffer: TypeAlias = str | ReadableBuffer def a2b_uu(__data: _AsciiBuffer) -> bytes: ... -def b2a_uu(__data: ReadableBuffer, *, backtick: bool = ...) -> bytes: ... +def b2a_uu(__data: ReadableBuffer, *, backtick: bool = False) -> bytes: ... if sys.version_info >= (3, 11): - def a2b_base64(__data: _AsciiBuffer, *, strict_mode: bool = ...) -> bytes: ... + def a2b_base64(__data: _AsciiBuffer, *, strict_mode: bool = False) -> bytes: ... else: def a2b_base64(__data: _AsciiBuffer) -> bytes: ... -def b2a_base64(__data: ReadableBuffer, *, newline: bool = ...) -> bytes: ... -def a2b_qp(data: _AsciiBuffer, header: bool = ...) -> bytes: ... 
-def b2a_qp(data: ReadableBuffer, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ... +def b2a_base64(__data: ReadableBuffer, *, newline: bool = True) -> bytes: ... +def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: ... +def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: ... if sys.version_info < (3, 11): def a2b_hqx(__data: _AsciiBuffer) -> bytes: ... @@ -26,7 +26,7 @@ if sys.version_info < (3, 11): def b2a_hqx(__data: ReadableBuffer) -> bytes: ... def crc_hqx(__data: ReadableBuffer, __crc: int) -> int: ... -def crc32(__data: ReadableBuffer, __crc: int = ...) -> int: ... +def crc32(__data: ReadableBuffer, __crc: int = 0) -> int: ... if sys.version_info >= (3, 8): # sep must be str or bytes, not bytearray or any other buffer diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index b2241bb60527..022b540d1e48 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -32,7 +32,7 @@ from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWra from types import CodeType, TracebackType, _Cell # mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi -from typing import ( # noqa: Y027 +from typing import ( # noqa: Y022 IO, Any, BinaryIO, @@ -54,7 +54,7 @@ from typing import ( # noqa: Y027 overload, type_check_only, ) -from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Literal, LiteralString, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -217,25 +217,25 @@ class int: if sys.version_info >= (3, 11): def to_bytes( - self, length: SupportsIndex = ..., byteorder: Literal["little", "big"] = ..., *, signed: bool = ... + self, length: SupportsIndex = 1, byteorder: Literal["little", "big"] = "big", *, signed: bool = False ) -> bytes: ... @classmethod def from_bytes( cls: type[Self], bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, - byteorder: Literal["little", "big"] = ..., + byteorder: Literal["little", "big"] = "big", *, - signed: bool = ..., + signed: bool = False, ) -> Self: ... else: - def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = ...) -> bytes: ... + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... @classmethod def from_bytes( cls: type[Self], bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, byteorder: Literal["little", "big"], *, - signed: bool = ..., + signed: bool = False, ) -> Self: ... def __add__(self, __x: int) -> int: ... @@ -266,7 +266,7 @@ class int: def __pow__(self, __x: int, __modulo: None = ...) -> Any: ... @overload def __pow__(self, __x: int, __modulo: int) -> int: ... - def __rpow__(self, __x: int, __mod: int | None = ...) -> Any: ... + def __rpow__(self, __x: int, __mod: int | None = None) -> Any: ... def __and__(self, __n: int) -> int: ... def __or__(self, __n: int) -> int: ... def __xor__(self, __n: int) -> int: ... @@ -317,11 +317,11 @@ class float: def __mod__(self, __x: float) -> float: ... def __divmod__(self, __x: float) -> tuple[float, float]: ... @overload - def __pow__(self, __x: int, __mod: None = ...) -> float: ... + def __pow__(self, __x: int, __mod: None = None) -> float: ... 
# positive x -> float; negative x -> complex # return type must be Any as `float | complex` causes too many false-positive errors @overload - def __pow__(self, __x: float, __mod: None = ...) -> Any: ... + def __pow__(self, __x: float, __mod: None = None) -> Any: ... def __radd__(self, __x: float) -> float: ... def __rsub__(self, __x: float) -> float: ... def __rmul__(self, __x: float) -> float: ... @@ -332,10 +332,10 @@ class float: @overload def __rpow__(self, __x: _PositiveInteger, __modulo: None = ...) -> float: ... @overload - def __rpow__(self, __x: _NegativeInteger, __mod: None = ...) -> complex: ... + def __rpow__(self, __x: _NegativeInteger, __mod: None = None) -> complex: ... # Returning `complex` for the general case gives too many false-positive errors. @overload - def __rpow__(self, __x: float, __mod: None = ...) -> Any: ... + def __rpow__(self, __x: float, __mod: None = None) -> Any: ... def __getnewargs__(self) -> tuple[float]: ... def __trunc__(self) -> int: ... if sys.version_info >= (3, 9): @@ -343,7 +343,7 @@ class float: def __floor__(self) -> int: ... @overload - def __round__(self, __ndigits: None = ...) -> int: ... + def __round__(self, __ndigits: None = None) -> int: ... @overload def __round__(self, __ndigits: SupportsIndex) -> float: ... def __eq__(self, __x: object) -> bool: ... @@ -386,12 +386,12 @@ class complex: def __add__(self, __x: complex) -> complex: ... def __sub__(self, __x: complex) -> complex: ... def __mul__(self, __x: complex) -> complex: ... - def __pow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __pow__(self, __x: complex, __mod: None = None) -> complex: ... def __truediv__(self, __x: complex) -> complex: ... def __radd__(self, __x: complex) -> complex: ... def __rsub__(self, __x: complex) -> complex: ... def __rmul__(self, __x: complex) -> complex: ... - def __rpow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __rpow__(self, __x: complex, __mod: None = None) -> complex: ... def __rtruediv__(self, __x: complex) -> complex: ... def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... @@ -413,20 +413,38 @@ class str(Sequence[str]): def __new__(cls: type[Self], object: object = ...) -> Self: ... @overload def __new__(cls: type[Self], object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... + @overload + def capitalize(self: LiteralString) -> LiteralString: ... + @overload def capitalize(self) -> str: ... # type: ignore[misc] + @overload + def casefold(self: LiteralString) -> LiteralString: ... + @overload def casefold(self) -> str: ... # type: ignore[misc] - def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + @overload + def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload + def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ... + def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... def endswith( self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... 
# type: ignore[misc] + @overload + def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: - def expandtabs(self, tabsize: int = ...) -> str: ... # type: ignore[misc] + @overload + def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -442,39 +460,104 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... + @overload + def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... + @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] - def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + @overload + def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload + def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def lower(self: LiteralString) -> LiteralString: ... + @overload def lower(self) -> str: ... # type: ignore[misc] - def lstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload + def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... # type: ignore[misc] + @overload + def replace( + self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 + ) -> LiteralString: ... + @overload + def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): + @overload + def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... + @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... + @overload def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + @overload + def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... 
+ @overload + def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] - def rstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] - def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] - def splitlines(self, keepends: bool = ...) -> list[str]: ... # type: ignore[misc] + @overload + def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload + def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload + def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload + def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... + @overload + def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - def strip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload + def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def swapcase(self: LiteralString) -> LiteralString: ... + @overload def swapcase(self) -> str: ... # type: ignore[misc] + @overload + def title(self: LiteralString) -> LiteralString: ... + @overload def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... + @overload + def upper(self: LiteralString) -> LiteralString: ... + @overload def upper(self) -> str: ... # type: ignore[misc] + @overload + def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... + @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload def maketrans(__x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... @staticmethod @overload - def maketrans(__x: str, __y: str, __z: str | None = ...) -> dict[int, int | None]: ... + def maketrans(__x: str, __y: str) -> dict[int, int]: ... + @staticmethod + @overload + def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... + @overload + def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... + @overload def __add__(self, __s: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __o: str) -> bool: ... # type: ignore[override] @@ -482,13 +565,25 @@ class str(Sequence[str]): def __ge__(self, __x: str) -> bool: ... def __getitem__(self, __i: SupportsIndex | slice) -> str: ... def __gt__(self, __x: str) -> bool: ... 
+ @overload + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __x: str) -> bool: ... + @overload + def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... + @overload def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] + @overload + def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __x: object) -> bool: ... + @overload + def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... @@ -500,11 +595,11 @@ class bytes(ByteString): @overload def __new__(cls: type[Self]) -> Self: ... def capitalize(self) -> bytes: ... - def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... + def center(self, __width: SupportsIndex, __fillchar: bytes = b" ") -> bytes: ... def count( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... - def decode(self, encoding: str = ..., errors: str = ...) -> str: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... def endswith( self, __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -512,7 +607,7 @@ class bytes(ByteString): __end: SupportsIndex | None = ..., ) -> bool: ... if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> bytes: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: ... else: def expandtabs(self, tabsize: int = ...) -> bytes: ... @@ -536,11 +631,11 @@ class bytes(ByteString): def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytes: ... - def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytes: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytes: ... def lower(self) -> bytes: ... - def lstrip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... + def lstrip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... def partition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... - def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = ...) -> bytes: ... + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = -1) -> bytes: ... if sys.version_info >= (3, 9): def removeprefix(self, __prefix: ReadableBuffer) -> bytes: ... def removesuffix(self, __suffix: ReadableBuffer) -> bytes: ... @@ -551,22 +646,22 @@ class bytes(ByteString): def rindex( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... - def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytes: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytes: ... def rpartition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... - def rsplit(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... - def rstrip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... 
- def split(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... - def splitlines(self, keepends: bool = ...) -> list[bytes]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def rstrip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def splitlines(self, keepends: bool = False) -> list[bytes]: ... def startswith( self, __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ..., ) -> bool: ... - def strip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... + def strip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... def swapcase(self) -> bytes: ... def title(self) -> bytes: ... - def translate(self, __table: ReadableBuffer | None, delete: bytes = ...) -> bytes: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = b"") -> bytes: ... def upper(self) -> bytes: ... def zfill(self, __width: SupportsIndex) -> bytes: ... @classmethod @@ -604,12 +699,12 @@ class bytearray(MutableSequence[int], ByteString): def __init__(self, __string: str, encoding: str, errors: str = ...) -> None: ... def append(self, __item: SupportsIndex) -> None: ... def capitalize(self) -> bytearray: ... - def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... + def center(self, __width: SupportsIndex, __fillchar: bytes = b" ") -> bytearray: ... def count( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... def copy(self) -> bytearray: ... - def decode(self, encoding: str = ..., errors: str = ...) -> str: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... def endswith( self, __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -617,7 +712,7 @@ class bytearray(MutableSequence[int], ByteString): __end: SupportsIndex | None = ..., ) -> bool: ... if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> bytearray: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: ... else: def expandtabs(self, tabsize: int = ...) -> bytearray: ... @@ -643,39 +738,39 @@ class bytearray(MutableSequence[int], ByteString): def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytearray: ... - def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytearray: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytearray: ... def lower(self) -> bytearray: ... - def lstrip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... + def lstrip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... def partition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... - def pop(self, __index: int = ...) -> int: ... + def pop(self, __index: int = -1) -> int: ... def remove(self, __value: int) -> None: ... if sys.version_info >= (3, 9): def removeprefix(self, __prefix: ReadableBuffer) -> bytearray: ... def removesuffix(self, __suffix: ReadableBuffer) -> bytearray: ... - def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = ...) -> bytearray: ... + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = -1) -> bytearray: ... 
def rfind( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... def rindex( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... - def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytearray: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytearray: ... def rpartition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... - def rsplit(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... - def rstrip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... - def split(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... - def splitlines(self, keepends: bool = ...) -> list[bytearray]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def rstrip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def splitlines(self, keepends: bool = False) -> list[bytearray]: ... def startswith( self, __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ..., ) -> bool: ... - def strip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... + def strip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... def swapcase(self) -> bytearray: ... def title(self) -> bytearray: ... - def translate(self, __table: ReadableBuffer | None, delete: bytes = ...) -> bytearray: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = b"") -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, __width: SupportsIndex) -> bytearray: ... @classmethod @@ -754,8 +849,10 @@ class memoryview(Sequence[int]): def __setitem__(self, __s: slice, __o: ReadableBuffer) -> None: ... @overload def __setitem__(self, __i: SupportsIndex, __o: SupportsIndex) -> None: ... - if sys.version_info >= (3, 8): - def tobytes(self, order: Literal["C", "F", "A"] | None = ...) -> bytes: ... + if sys.version_info >= (3, 10): + def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: ... + elif sys.version_info >= (3, 8): + def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: ... else: def tobytes(self) -> bytes: ... @@ -835,7 +932,7 @@ class tuple(Sequence[_T_co], Generic[_T_co]): def __mul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... def __rmul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... def count(self, __value: Any) -> int: ... - def index(self, __value: Any, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def index(self, __value: Any, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -861,7 +958,7 @@ class function: __module__: str # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any. - def __get__(self, obj: object | None, type: type | None = ...) -> Any: ... + def __get__(self, obj: object, type: type | None = ...) -> Any: ... 
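To make the newly spelled-out defaults on tuple.index() concrete (a small sketch; start=0 and stop=sys.maxsize simply mean "search the whole tuple"):

    t = (1, 2, 3, 1)
    assert t.index(1) == 0        # start defaults to 0
    assert t.index(1, 1) == 3     # searching from position 1 finds the second 1
    assert t.index(2, 0, 2) == 1  # stop bounds the search; sys.maxsize means "to the end"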
class list(MutableSequence[_T], Generic[_T]): @overload @@ -871,10 +968,10 @@ class list(MutableSequence[_T], Generic[_T]): def copy(self) -> list[_T]: ... def append(self, __object: _T) -> None: ... def extend(self, __iterable: Iterable[_T]) -> None: ... - def pop(self, __index: SupportsIndex = ...) -> _T: ... + def pop(self, __index: SupportsIndex = -1) -> _T: ... # Signature of `list.index` should be kept in line with `collections.UserList.index()` # and multiprocessing.managers.ListProxy.index() - def index(self, __value: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def index(self, __value: _T, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... def count(self, __value: _T) -> int: ... def insert(self, __index: SupportsIndex, __object: _T) -> None: ... def remove(self, __value: _T) -> None: ... @@ -884,9 +981,9 @@ class list(MutableSequence[_T], Generic[_T]): # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] # to work around invariance @overload - def sort(self: list[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... @overload - def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -946,7 +1043,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, __iterable: Iterable[_T], __value: None = ...) -> dict[_T, Any | None]: ... + def fromkeys(cls, __iterable: Iterable[_T], __value: None = None) -> dict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ... @@ -1128,27 +1225,90 @@ if sys.version_info >= (3, 10): @overload async def anext(__i: SupportsAnext[_T], default: _VT) -> _T | _VT: ... -# TODO: `compile` has a more precise return type in reality; work on a way of expressing that? +# compile() returns a CodeType, unless the flags argument includes PyCF_ONLY_AST (=1024), +# in which case it returns ast.AST. We have overloads for flag 0 (the default) and for +# explicitly passing PyCF_ONLY_AST. We fall back to Any for other values of flags. if sys.version_info >= (3, 8): + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[0], + dont_inherit: int = False, + optimize: int = -1, + *, + _feature_version: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + *, + dont_inherit: int = False, + optimize: int = -1, + _feature_version: int = -1, + ) -> CodeType: ... 
+ @overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, - flags: int = ..., - dont_inherit: int = ..., - optimize: int = ..., + flags: Literal[1024], + dont_inherit: int = False, + optimize: int = -1, *, - _feature_version: int = ..., + _feature_version: int = -1, + ) -> _ast.AST: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: int, + dont_inherit: int = False, + optimize: int = -1, + *, + _feature_version: int = -1, ) -> Any: ... else: + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[0], + dont_inherit: int = False, + optimize: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + *, + dont_inherit: int = False, + optimize: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[1024], + dont_inherit: int = False, + optimize: int = -1, + ) -> _ast.AST: ... + @overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, - flags: int = ..., - dont_inherit: int = ..., - optimize: int = ..., + flags: int, + dont_inherit: int = False, + optimize: int = -1, ) -> Any: ... def copyright() -> None: ... @@ -1163,27 +1323,29 @@ def divmod(__x: _T_contra, __y: SupportsRDivMod[_T_contra, _T_co]) -> _T_co: ... # The `globals` argument to `eval` has to be `dict[str, Any]` rather than `dict[str, object]` due to invariance. # (The `globals` argument has to be a "real dict", rather than any old mapping, unlike the `locals` argument.) def eval( - __source: str | ReadableBuffer | CodeType, __globals: dict[str, Any] | None = ..., __locals: Mapping[str, object] | None = ... + __source: str | ReadableBuffer | CodeType, + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, ) -> Any: ... # Comment above regarding `eval` applies to `exec` as well if sys.version_info >= (3, 11): def exec( __source: str | ReadableBuffer | CodeType, - __globals: dict[str, Any] | None = ..., - __locals: Mapping[str, object] | None = ..., + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, *, - closure: tuple[_Cell, ...] | None = ..., + closure: tuple[_Cell, ...] | None = None, ) -> None: ... else: def exec( __source: str | ReadableBuffer | CodeType, - __globals: dict[str, Any] | None = ..., - __locals: Mapping[str, object] | None = ..., + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, ) -> None: ... -def exit(code: sys._ExitCode = ...) -> NoReturn: ... +def exit(code: sys._ExitCode = None) -> NoReturn: ... class filter(Iterator[_T], Generic[_T]): @overload @@ -1195,7 +1357,7 @@ class filter(Iterator[_T], Generic[_T]): def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... -def format(__value: object, __format_spec: str = ...) -> str: ... +def format(__value: object, __format_spec: str = "") -> str: ... 
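A short sketch of what the new compile() overloads distinguish: leaving flags at 0 yields a code object, while passing ast.PyCF_ONLY_AST (value 1024) yields an AST node:

    import ast

    code = compile("x = 1", "<demo>", "exec")                     # flags defaults to 0 -> CodeType
    tree = compile("x = 1", "<demo>", "exec", ast.PyCF_ONLY_AST)  # -> ast.Module, an ast.AST

    exec(code)                        # the code object is directly executable
    assert isinstance(tree, ast.AST)  # the AST can be inspected or recompiled later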
@overload def getattr(__o: object, __name: str) -> Any: ... @@ -1218,7 +1380,7 @@ def hash(__obj: object) -> int: ... def help(request: object = ...) -> None: ... def hex(__number: int | SupportsIndex) -> str: ... def id(__obj: object) -> int: ... -def input(__prompt: object = ...) -> str: ... +def input(__prompt: object = None) -> str: ... class _GetItemIterable(Protocol[_T_co]): def __getitem__(self, __i: int) -> _T_co: ... @@ -1326,13 +1488,13 @@ _Opener: TypeAlias = Callable[[str, int], int] @overload def open( file: FileDescriptorOrPath, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> TextIOWrapper: ... # Unbuffered binary mode: returns a FileIO @@ -1341,11 +1503,11 @@ def open( file: FileDescriptorOrPath, mode: OpenBinaryMode, buffering: Literal[0], - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> FileIO: ... # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @@ -1353,34 +1515,34 @@ def open( def open( file: FileDescriptorOrPath, mode: OpenBinaryModeUpdating, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BufferedRandom: ... @overload def open( file: FileDescriptorOrPath, mode: OpenBinaryModeWriting, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BufferedWriter: ... @overload def open( file: FileDescriptorOrPath, mode: OpenBinaryModeReading, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BufferedReader: ... # Buffering cannot be determined: fall back to BinaryIO @@ -1388,12 +1550,12 @@ def open( def open( file: FileDescriptorOrPath, mode: OpenBinaryMode, - buffering: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BinaryIO: ... 
# Fallback if mode is not specified @@ -1401,12 +1563,12 @@ def open( def open( file: FileDescriptorOrPath, mode: str, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> IO[Any]: ... def ord(__c: str | bytes | bytearray) -> int: ... @@ -1416,14 +1578,14 @@ class _SupportsWriteAndFlush(SupportsWrite[_T_contra], Protocol[_T_contra]): @overload def print( *values: object, - sep: str | None = ..., - end: str | None = ..., - file: SupportsWrite[str] | None = ..., - flush: Literal[False] = ..., + sep: str | None = " ", + end: str | None = "\n", + file: SupportsWrite[str] | None = None, + flush: Literal[False] = False, ) -> None: ... @overload def print( - *values: object, sep: str | None = ..., end: str | None = ..., file: _SupportsWriteAndFlush[str] | None = ..., flush: bool + *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool ) -> None: ... _E = TypeVar("_E", contravariant=True) @@ -1448,72 +1610,72 @@ if sys.version_info >= (3, 8): @overload def pow(base: int, exp: int, mod: int) -> int: ... @overload - def pow(base: int, exp: Literal[0], mod: None = ...) -> Literal[1]: ... # type: ignore[misc] + def pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]: ... # type: ignore[misc] @overload - def pow(base: int, exp: _PositiveInteger, mod: None = ...) -> int: ... # type: ignore[misc] + def pow(base: int, exp: _PositiveInteger, mod: None = None) -> int: ... # type: ignore[misc] @overload - def pow(base: int, exp: _NegativeInteger, mod: None = ...) -> float: ... # type: ignore[misc] + def pow(base: int, exp: _NegativeInteger, mod: None = None) -> float: ... # type: ignore[misc] # int base & positive-int exp -> int; int base & negative-int exp -> float # return type must be Any as `int | float` causes too many false-positive errors @overload - def pow(base: int, exp: int, mod: None = ...) -> Any: ... + def pow(base: int, exp: int, mod: None = None) -> Any: ... @overload - def pow(base: _PositiveInteger, exp: float, mod: None = ...) -> float: ... + def pow(base: _PositiveInteger, exp: float, mod: None = None) -> float: ... @overload - def pow(base: _NegativeInteger, exp: float, mod: None = ...) -> complex: ... + def pow(base: _NegativeInteger, exp: float, mod: None = None) -> complex: ... @overload - def pow(base: float, exp: int, mod: None = ...) -> float: ... + def pow(base: float, exp: int, mod: None = None) -> float: ... # float base & float exp could return float or complex # return type must be Any (same as complex base, complex exp), # as `float | complex` causes too many false-positive errors @overload - def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> Any: ... + def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> Any: ... @overload - def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> complex: ... + def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ... @overload - def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... + def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... 
@overload - def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... + def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... @overload - def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M = ...) -> _T_co: ... + def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ... @overload - def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = ...) -> Any: ... + def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = None) -> Any: ... @overload - def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = ...) -> complex: ... + def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = None) -> complex: ... else: @overload def pow(__base: int, __exp: int, __mod: int) -> int: ... @overload - def pow(__base: int, __exp: Literal[0], __mod: None = ...) -> Literal[1]: ... # type: ignore[misc] + def pow(__base: int, __exp: Literal[0], __mod: None = None) -> Literal[1]: ... # type: ignore[misc] @overload - def pow(__base: int, __exp: _PositiveInteger, __mod: None = ...) -> int: ... # type: ignore[misc] + def pow(__base: int, __exp: _PositiveInteger, __mod: None = None) -> int: ... # type: ignore[misc] @overload - def pow(__base: int, __exp: _NegativeInteger, __mod: None = ...) -> float: ... # type: ignore[misc] + def pow(__base: int, __exp: _NegativeInteger, __mod: None = None) -> float: ... # type: ignore[misc] @overload - def pow(__base: int, __exp: int, __mod: None = ...) -> Any: ... + def pow(__base: int, __exp: int, __mod: None = None) -> Any: ... @overload - def pow(__base: _PositiveInteger, __exp: float, __mod: None = ...) -> float: ... + def pow(__base: _PositiveInteger, __exp: float, __mod: None = None) -> float: ... @overload - def pow(__base: _NegativeInteger, __exp: float, __mod: None = ...) -> complex: ... + def pow(__base: _NegativeInteger, __exp: float, __mod: None = None) -> complex: ... @overload - def pow(__base: float, __exp: int, __mod: None = ...) -> float: ... + def pow(__base: float, __exp: int, __mod: None = None) -> float: ... @overload - def pow(__base: float, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> Any: ... + def pow(__base: float, __exp: complex | _SupportsSomeKindOfPow, __mod: None = None) -> Any: ... @overload - def pow(__base: complex, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> complex: ... + def pow(__base: complex, __exp: complex | _SupportsSomeKindOfPow, __mod: None = None) -> complex: ... @overload - def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... + def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E, __mod: None = None) -> _T_co: ... @overload - def pow(__base: _SupportsPow3NoneOnly[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... + def pow(__base: _SupportsPow3NoneOnly[_E, _T_co], __exp: _E, __mod: None = None) -> _T_co: ... @overload - def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M = ...) -> _T_co: ... + def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M) -> _T_co: ... @overload - def pow(__base: _SupportsSomeKindOfPow, __exp: float, __mod: None = ...) -> Any: ... + def pow(__base: _SupportsSomeKindOfPow, __exp: float, __mod: None = None) -> Any: ... @overload - def pow(__base: _SupportsSomeKindOfPow, __exp: complex, __mod: None = ...) -> complex: ... + def pow(__base: _SupportsSomeKindOfPow, __exp: complex, __mod: None = None) -> complex: ... -def quit(code: sys._ExitCode = ...) -> NoReturn: ... 
+def quit(code: sys._ExitCode = None) -> NoReturn: ... class reversed(Iterator[_T], Generic[_T]): @overload @@ -1537,7 +1699,7 @@ class _SupportsRound2(Protocol[_T_co]): def __round__(self, __ndigits: int) -> _T_co: ... @overload -def round(number: _SupportsRound1[_T], ndigits: None = ...) -> _T: ... +def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: ... @overload def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... @@ -1546,10 +1708,10 @@ def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... def setattr(__obj: object, __name: str, __value: Any) -> None: ... @overload def sorted( - __iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ... + __iterable: Iterable[SupportsRichComparisonT], *, key: None = None, reverse: bool = False ) -> list[SupportsRichComparisonT]: ... @overload -def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> list[_T]: ... +def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]: ... _AddableT1 = TypeVar("_AddableT1", bound=SupportsAdd[Any, Any]) _AddableT2 = TypeVar("_AddableT2", bound=SupportsAdd[Any, Any]) @@ -1564,11 +1726,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool], start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool], __start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... @@ -1671,10 +1833,10 @@ class zip(Iterator[_T_co], Generic[_T_co]): # Return type of `__import__` should be kept the same as return type of `importlib.import_module` def __import__( name: str, - globals: Mapping[str, object] | None = ..., - locals: Mapping[str, object] | None = ..., + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, fromlist: Sequence[str] = ..., - level: int = ..., + level: int = 0, ) -> types.ModuleType: ... def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... @@ -1848,6 +2010,7 @@ if sys.version_info >= (3, 11): # See `check_exception_group.py` for use-cases and comments. class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... + def __init__(self, __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> None: ... @property def message(self) -> str: ... @property @@ -1885,6 +2048,7 @@ if sys.version_info >= (3, 11): class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: ... + def __init__(self, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> None: ... @property def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... # We accept a narrower type, but that's OK. 
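The LiteralString overloads added to str above follow PEP 675; nothing changes at runtime, but a PEP 675-aware checker can keep track of strings built only from literals. A minimal sketch, where run_query is a hypothetical function used purely for illustration:

    from typing import Final
    from typing_extensions import LiteralString

    def run_query(sql: LiteralString) -> None: ...

    TABLE: Final = "users"
    run_query("SELECT * FROM " + TABLE)                   # ok: both operands are literal strings
    run_query(" ".join(["SELECT", "*", "FROM", TABLE]))   # ok: join() preserves LiteralString

    def lookup(user_input: str) -> None:
        run_query("SELECT * FROM " + user_input)          # flagged: user_input is plain str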
diff --git a/mypy/typeshed/stdlib/bz2.pyi b/mypy/typeshed/stdlib/bz2.pyi index 295271d4a80b..8a7151d9e456 100644 --- a/mypy/typeshed/stdlib/bz2.pyi +++ b/mypy/typeshed/stdlib/bz2.pyi @@ -19,7 +19,7 @@ class _WritableFileobj(Protocol): # def fileno(self) -> int: ... # def close(self) -> object: ... -def compress(data: ReadableBuffer, compresslevel: int = ...) -> bytes: ... +def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: ... def decompress(data: ReadableBuffer) -> bytes: ... _ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] @@ -30,102 +30,102 @@ _WriteTextMode: TypeAlias = Literal["wt", "xt", "at"] @overload def open( filename: _ReadableFileobj, - mode: _ReadBinaryMode = ..., - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + mode: _ReadBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BZ2File: ... @overload def open( filename: _ReadableFileobj, mode: _ReadTextMode, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: _WritableFileobj, mode: _WriteBinaryMode, - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BZ2File: ... @overload def open( filename: _WritableFileobj, mode: _WriteTextMode, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: StrOrBytesPath, - mode: _ReadBinaryMode | _WriteBinaryMode = ..., - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + mode: _ReadBinaryMode | _WriteBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BZ2File: ... @overload def open( filename: StrOrBytesPath, mode: _ReadTextMode | _WriteTextMode, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, mode: str, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BZ2File | TextIO: ... class BZ2File(BaseStream, IO[bytes]): def __enter__(self: Self) -> Self: ... if sys.version_info >= (3, 9): @overload - def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = ...) -> None: ... + def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... @overload - def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = ..., *, compresslevel: int = ...) -> None: ... + def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... 
@overload def __init__( - self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = ..., *, compresslevel: int = ... + self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 ) -> None: ... else: @overload def __init__( - self, filename: _WritableFileobj, mode: _WriteBinaryMode, buffering: Any | None = ..., compresslevel: int = ... + self, filename: _WritableFileobj, mode: _WriteBinaryMode, buffering: Any | None = None, compresslevel: int = 9 ) -> None: ... @overload def __init__( - self, filename: _ReadableFileobj, mode: _ReadBinaryMode = ..., buffering: Any | None = ..., compresslevel: int = ... + self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", buffering: Any | None = None, compresslevel: int = 9 ) -> None: ... @overload def __init__( self, filename: StrOrBytesPath, - mode: _ReadBinaryMode | _WriteBinaryMode = ..., - buffering: Any | None = ..., - compresslevel: int = ..., + mode: _ReadBinaryMode | _WriteBinaryMode = "r", + buffering: Any | None = None, + compresslevel: int = 9, ) -> None: ... - def read(self, size: int | None = ...) -> bytes: ... - def read1(self, size: int = ...) -> bytes: ... - def readline(self, size: SupportsIndex = ...) -> bytes: ... # type: ignore[override] + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: SupportsIndex = -1) -> bytes: ... # type: ignore[override] def readinto(self, b: WriteableBuffer) -> int: ... - def readlines(self, size: SupportsIndex = ...) -> list[bytes]: ... - def seek(self, offset: int, whence: int = ...) -> int: ... + def readlines(self, size: SupportsIndex = -1) -> list[bytes]: ... + def seek(self, offset: int, whence: int = 0) -> int: ... def write(self, data: ReadableBuffer) -> int: ... def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ... @@ -137,7 +137,7 @@ class BZ2Compressor: @final class BZ2Decompressor: - def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property def eof(self) -> bool: ... @property diff --git a/mypy/typeshed/stdlib/cProfile.pyi b/mypy/typeshed/stdlib/cProfile.pyi index 6e21fc92ade5..77608b268f6f 100644 --- a/mypy/typeshed/stdlib/cProfile.pyi +++ b/mypy/typeshed/stdlib/cProfile.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import Self, StrOrBytesPath, Unused from collections.abc import Callable from types import CodeType from typing import Any, TypeVar @@ -7,9 +7,9 @@ from typing_extensions import ParamSpec, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = ..., sort: str | int = ...) -> None: ... +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... def runctx( - statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = ..., sort: str | int = ... + statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: ... _T = TypeVar("_T") @@ -23,7 +23,7 @@ class Profile: ) -> None: ... def enable(self) -> None: ... def disable(self) -> None: ... - def print_stats(self, sort: str | int = ...) -> None: ... + def print_stats(self, sort: str | int = -1) -> None: ... def dump_stats(self, file: StrOrBytesPath) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... 
@@ -32,6 +32,6 @@ class Profile: def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... if sys.version_info >= (3, 8): def __enter__(self: Self) -> Self: ... - def __exit__(self, *exc_info: object) -> None: ... + def __exit__(self, *exc_info: Unused) -> None: ... def label(code: str | CodeType) -> _Label: ... # undocumented diff --git a/mypy/typeshed/stdlib/calendar.pyi b/mypy/typeshed/stdlib/calendar.pyi index 74b8d39caf79..255a12d3348a 100644 --- a/mypy/typeshed/stdlib/calendar.pyi +++ b/mypy/typeshed/stdlib/calendar.pyi @@ -1,5 +1,6 @@ import datetime import sys +from _typeshed import Unused from collections.abc import Iterable, Sequence from time import struct_time from typing import ClassVar @@ -50,7 +51,7 @@ def monthrange(year: int, month: int) -> tuple[int, int]: ... class Calendar: firstweekday: int - def __init__(self, firstweekday: int = ...) -> None: ... + def __init__(self, firstweekday: int = 0) -> None: ... def getfirstweekday(self) -> int: ... def setfirstweekday(self, firstweekday: int) -> None: ... def iterweekdays(self) -> Iterable[int]: ... @@ -60,9 +61,9 @@ class Calendar: def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: ... def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: ... def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: ... - def yeardatescalendar(self, year: int, width: int = ...) -> list[list[int]]: ... - def yeardays2calendar(self, year: int, width: int = ...) -> list[list[tuple[int, int]]]: ... - def yeardayscalendar(self, year: int, width: int = ...) -> list[list[int]]: ... + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[int]]: ... + def yeardays2calendar(self, year: int, width: int = 3) -> list[list[tuple[int, int]]]: ... + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[int]]: ... def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: ... def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: ... @@ -72,21 +73,21 @@ class TextCalendar(Calendar): def formatweek(self, theweek: int, width: int) -> str: ... def formatweekday(self, day: int, width: int) -> str: ... def formatweekheader(self, width: int) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ... - def prmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... - def formatmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... - def formatyear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... - def pryear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... + def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: ... + def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... + def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... + def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... + def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... def firstweekday() -> int: ... def monthcalendar(year: int, month: int) -> list[list[int]]: ... def prweek(theweek: int, width: int) -> None: ... def week(theweek: int, width: int) -> str: ... def weekheader(width: int) -> str: ... 
-def prmonth(theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... -def month(theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... -def calendar(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... -def prcal(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... +def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... +def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... +def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... +def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... class HTMLCalendar(Calendar): cssclasses: ClassVar[list[str]] @@ -100,29 +101,31 @@ class HTMLCalendar(Calendar): def formatweek(self, theweek: int) -> str: ... def formatweekday(self, day: int) -> str: ... def formatweekheader(self) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... - def formatmonth(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... - def formatyear(self, theyear: int, width: int = ...) -> str: ... - def formatyearpage(self, theyear: int, width: int = ..., css: str | None = ..., encoding: str | None = ...) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatyear(self, theyear: int, width: int = 3) -> str: ... + def formatyearpage( + self, theyear: int, width: int = 3, css: str | None = "calendar.css", encoding: str | None = None + ) -> str: ... class different_locale: def __init__(self, locale: _LocaleType) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... class LocaleTextCalendar(TextCalendar): - def __init__(self, firstweekday: int = ..., locale: _LocaleType | None = ...) -> None: ... + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... class LocaleHTMLCalendar(HTMLCalendar): - def __init__(self, firstweekday: int = ..., locale: _LocaleType | None = ...) -> None: ... + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... def formatweekday(self, day: int) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... c: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... -def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... -def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... +def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... def timegm(tuple: tuple[int, ...] | struct_time) -> int: ... 
# Data attributes diff --git a/mypy/typeshed/stdlib/cgi.pyi b/mypy/typeshed/stdlib/cgi.pyi index ce9a15415aab..6f5637e3cce1 100644 --- a/mypy/typeshed/stdlib/cgi.pyi +++ b/mypy/typeshed/stdlib/cgi.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self, SupportsGetItem, SupportsItemAccess +from _typeshed import Self, SupportsGetItem, SupportsItemAccess, Unused from builtins import list as _list, type as _type from collections.abc import Iterable, Iterator, Mapping from email.message import Message @@ -25,11 +25,11 @@ if sys.version_info < (3, 8): __all__ += ["parse_qs", "parse_qsl", "escape"] def parse( - fp: IO[Any] | None = ..., + fp: IO[Any] | None = None, environ: SupportsItemAccess[str, str] = ..., keep_blank_values: bool = ..., strict_parsing: bool = ..., - separator: str = ..., + separator: str = "&", ) -> dict[str, list[str]]: ... if sys.version_info < (3, 8): @@ -37,7 +37,7 @@ if sys.version_info < (3, 8): def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> list[tuple[str, str]]: ... def parse_multipart( - fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = ..., errors: str = ..., separator: str = ... + fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" ) -> dict[str, list[Any]]: ... class _Environ(Protocol): @@ -52,7 +52,7 @@ def print_directory() -> None: ... def print_environ_usage() -> None: ... if sys.version_info < (3, 8): - def escape(s: str, quote: bool | None = ...) -> str: ... + def escape(s: str, quote: bool | None = None) -> str: ... class MiniFieldStorage: # The first five "Any" attributes here are always None, but mypy doesn't support that @@ -93,24 +93,24 @@ class FieldStorage: value: None | bytes | _list[Any] def __init__( self, - fp: IO[Any] | None = ..., - headers: Mapping[str, str] | Message | None = ..., - outerboundary: bytes = ..., + fp: IO[Any] | None = None, + headers: Mapping[str, str] | Message | None = None, + outerboundary: bytes = b"", environ: SupportsGetItem[str, str] = ..., - keep_blank_values: int = ..., - strict_parsing: int = ..., - limit: int | None = ..., - encoding: str = ..., - errors: str = ..., - max_num_fields: int | None = ..., - separator: str = ..., + keep_blank_values: int = 0, + strict_parsing: int = 0, + limit: int | None = None, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", ) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def __iter__(self) -> Iterator[str]: ... def __getitem__(self, key: str) -> Any: ... - def getvalue(self, key: str, default: Any = ...) -> Any: ... - def getfirst(self, key: str, default: Any = ...) -> Any: ... + def getvalue(self, key: str, default: Any = None) -> Any: ... + def getfirst(self, key: str, default: Any = None) -> Any: ... def getlist(self, key: str) -> _list[Any]: ... def keys(self) -> _list[str]: ... def __contains__(self, key: str) -> bool: ... @@ -120,9 +120,9 @@ class FieldStorage: def make_file(self) -> IO[Any]: ... def print_exception( - type: type[BaseException] | None = ..., - value: BaseException | None = ..., - tb: TracebackType | None = ..., - limit: int | None = ..., + type: type[BaseException] | None = None, + value: BaseException | None = None, + tb: TracebackType | None = None, + limit: int | None = None, ) -> None: ... def print_arguments() -> None: ... 
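[Illustration, not part of the patch] A small sketch, assuming an empty urlencoded POST body, of the FieldStorage defaults the cgi hunk above now spells out; apart from the cgi API itself, the names are invented.

    import io
    import cgi
    environ = {"REQUEST_METHOD": "POST", "CONTENT_TYPE": "application/x-www-form-urlencoded", "CONTENT_LENGTH": "0"}
    form = cgi.FieldStorage(fp=io.BytesIO(b""), environ=environ)  # keep_blank_values=0, separator="&" by default
    print(form.getfirst("name"))           # default is now written as None in the stub
    print(form.getfirst("name", "anon"))   # explicit fallback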
diff --git a/mypy/typeshed/stdlib/cgitb.pyi b/mypy/typeshed/stdlib/cgitb.pyi index ea5a8341bc5e..04bcbfb0d13d 100644 --- a/mypy/typeshed/stdlib/cgitb.pyi +++ b/mypy/typeshed/stdlib/cgitb.pyi @@ -13,20 +13,20 @@ def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | N def scanvars( reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] ) -> list[tuple[str, str | None, Any]]: ... # undocumented -def html(einfo: OptExcInfo, context: int = ...) -> str: ... -def text(einfo: OptExcInfo, context: int = ...) -> str: ... +def html(einfo: OptExcInfo, context: int = 5) -> str: ... +def text(einfo: OptExcInfo, context: int = 5) -> str: ... class Hook: # undocumented def __init__( self, - display: int = ..., - logdir: StrOrBytesPath | None = ..., - context: int = ..., - file: IO[str] | None = ..., - format: str = ..., + display: int = 1, + logdir: StrOrBytesPath | None = None, + context: int = 5, + file: IO[str] | None = None, + format: str = "html", ) -> None: ... def __call__(self, etype: type[BaseException] | None, evalue: BaseException | None, etb: TracebackType | None) -> None: ... - def handle(self, info: OptExcInfo | None = ...) -> None: ... + def handle(self, info: OptExcInfo | None = None) -> None: ... -def handler(info: OptExcInfo | None = ...) -> None: ... -def enable(display: int = ..., logdir: StrOrBytesPath | None = ..., context: int = ..., format: str = ...) -> None: ... +def handler(info: OptExcInfo | None = None) -> None: ... +def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: ... diff --git a/mypy/typeshed/stdlib/chunk.pyi b/mypy/typeshed/stdlib/chunk.pyi index 50ff267c5436..9788d35f680c 100644 --- a/mypy/typeshed/stdlib/chunk.pyi +++ b/mypy/typeshed/stdlib/chunk.pyi @@ -9,12 +9,12 @@ class Chunk: size_read: int offset: int seekable: bool - def __init__(self, file: IO[bytes], align: bool = ..., bigendian: bool = ..., inclheader: bool = ...) -> None: ... + def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... def getname(self) -> bytes: ... def getsize(self) -> int: ... def close(self) -> None: ... def isatty(self) -> bool: ... - def seek(self, pos: int, whence: int = ...) -> None: ... + def seek(self, pos: int, whence: int = 0) -> None: ... def tell(self) -> int: ... - def read(self, size: int = ...) -> bytes: ... + def read(self, size: int = -1) -> bytes: ... def skip(self) -> None: ... diff --git a/mypy/typeshed/stdlib/cmath.pyi b/mypy/typeshed/stdlib/cmath.pyi index 30ada5d5b5ef..0a85600e99b7 100644 --- a/mypy/typeshed/stdlib/cmath.pyi +++ b/mypy/typeshed/stdlib/cmath.pyi @@ -27,7 +27,7 @@ def atanh(__z: _C) -> complex: ... def cos(__z: _C) -> complex: ... def cosh(__z: _C) -> complex: ... def exp(__z: _C) -> complex: ... -def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = ..., abs_tol: SupportsFloat = ...) -> bool: ... +def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: ... def isinf(__z: _C) -> bool: ... def isnan(__z: _C) -> bool: ... def log(__x: _C, __y_obj: _C = ...) -> complex: ... diff --git a/mypy/typeshed/stdlib/cmd.pyi b/mypy/typeshed/stdlib/cmd.pyi index ddefff2edf05..b658a873410b 100644 --- a/mypy/typeshed/stdlib/cmd.pyi +++ b/mypy/typeshed/stdlib/cmd.pyi @@ -23,9 +23,9 @@ class Cmd: stdout: IO[str] cmdqueue: list[str] completekey: str - def __init__(self, completekey: str = ..., stdin: IO[str] | None = ..., stdout: IO[str] | None = ...) -> None: ... 
+    def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: ...
     old_completer: Callable[[str, int], str | None] | None
-    def cmdloop(self, intro: Any | None = ...) -> None: ...
+    def cmdloop(self, intro: Any | None = None) -> None: ...
     def precmd(self, line: str) -> str: ...
     def postcmd(self, stop: bool, line: str) -> bool: ...
     def preloop(self) -> None: ...
@@ -43,4 +43,4 @@ class Cmd:
     def complete_help(self, *args: Any) -> list[str]: ...
     def do_help(self, arg: str) -> bool | None: ...
     def print_topics(self, header: str, cmds: list[str] | None, cmdlen: Any, maxcol: int) -> None: ...
-    def columnize(self, list: list[str] | None, displaywidth: int = ...) -> None: ...
+    def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: ...
diff --git a/mypy/typeshed/stdlib/code.pyi b/mypy/typeshed/stdlib/code.pyi
index 59318aa353e2..4715bd866ddc 100644
--- a/mypy/typeshed/stdlib/code.pyi
+++ b/mypy/typeshed/stdlib/code.pyi
@@ -8,26 +8,26 @@ __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_
 class InteractiveInterpreter:
     locals: Mapping[str, Any]  # undocumented
     compile: CommandCompiler  # undocumented
-    def __init__(self, locals: Mapping[str, Any] | None = ...) -> None: ...
-    def runsource(self, source: str, filename: str = ..., symbol: str = ...) -> bool: ...
+    def __init__(self, locals: Mapping[str, Any] | None = None) -> None: ...
+    def runsource(self, source: str, filename: str = "<input>", symbol: str = "single") -> bool: ...
     def runcode(self, code: CodeType) -> None: ...
-    def showsyntaxerror(self, filename: str | None = ...) -> None: ...
+    def showsyntaxerror(self, filename: str | None = None) -> None: ...
     def showtraceback(self) -> None: ...
     def write(self, data: str) -> None: ...

 class InteractiveConsole(InteractiveInterpreter):
     buffer: list[str]  # undocumented
     filename: str  # undocumented
-    def __init__(self, locals: Mapping[str, Any] | None = ..., filename: str = ...) -> None: ...
-    def interact(self, banner: str | None = ..., exitmsg: str | None = ...) -> None: ...
+    def __init__(self, locals: Mapping[str, Any] | None = None, filename: str = "<console>") -> None: ...
+    def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: ...
     def push(self, line: str) -> bool: ...
     def resetbuffer(self) -> None: ...
-    def raw_input(self, prompt: str = ...) -> str: ...
+    def raw_input(self, prompt: str = "") -> str: ...

 def interact(
-    banner: str | None = ...,
-    readfunc: Callable[[str], str] | None = ...,
-    local: Mapping[str, Any] | None = ...,
-    exitmsg: str | None = ...,
+    banner: str | None = None,
+    readfunc: Callable[[str], str] | None = None,
+    local: Mapping[str, Any] | None = None,
+    exitmsg: str | None = None,
 ) -> None: ...
-def compile_command(source: str, filename: str = ..., symbol: str = ...) -> CodeType | None: ...
+def compile_command(source: str, filename: str = "<input>", symbol: str = "single") -> CodeType | None: ...
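[Illustration, not part of the patch] A quick sketch of code.compile_command() behaving per the defaults shown above (symbol="single"); the variable names are invented.

    import code
    pending = code.compile_command("if True:")   # incomplete input -> None
    ready = code.compile_command("x = 1")        # complete statement -> a code object
    print(pending, ready)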
diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index cd6ac0006c53..33d0e6709923 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -1,3 +1,4 @@ +import sys import types from _codecs import * from _typeshed import ReadableBuffer, Self @@ -112,13 +113,13 @@ class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): cls: type[Self], encode: _Encoder, decode: _Decoder, - streamreader: _StreamReader | None = ..., - streamwriter: _StreamWriter | None = ..., - incrementalencoder: _IncrementalEncoder | None = ..., - incrementaldecoder: _IncrementalDecoder | None = ..., - name: str | None = ..., + streamreader: _StreamReader | None = None, + streamwriter: _StreamWriter | None = None, + incrementalencoder: _IncrementalEncoder | None = None, + incrementaldecoder: _IncrementalDecoder | None = None, + name: str | None = None, *, - _is_text_encoding: bool | None = ..., + _is_text_encoding: bool | None = None, ) -> Self: ... def getencoder(encoding: str) -> _Encoder: ... @@ -127,12 +128,20 @@ def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... def getreader(encoding: str) -> _StreamReader: ... def getwriter(encoding: str) -> _StreamWriter: ... -def open( - filename: str, mode: str = ..., encoding: str | None = ..., errors: str = ..., buffering: int = ... -) -> StreamReaderWriter: ... -def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = ..., errors: str = ...) -> StreamRecoder: ... -def iterencode(iterator: Iterable[str], encoding: str, errors: str = ...) -> Generator[bytes, None, None]: ... -def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = ...) -> Generator[str, None, None]: ... + +if sys.version_info >= (3, 8): + def open( + filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 + ) -> StreamReaderWriter: ... + +else: + def open( + filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = 1 + ) -> StreamReaderWriter: ... + +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: ... +def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: ... +def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: ... BOM: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` BOM_BE: Literal[b"\xfe\xff"] @@ -155,14 +164,14 @@ def namereplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... class Codec: # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. - def encode(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... - def decode(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... class IncrementalEncoder: errors: str - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def encode(self, input: str, final: bool = ...) -> bytes: ... + def encode(self, input: str, final: bool = False) -> bytes: ... def reset(self) -> None: ... # documentation says int but str is needed for the subclass. 
def getstate(self) -> int | str: ... @@ -170,9 +179,9 @@ class IncrementalEncoder: class IncrementalDecoder: errors: str - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... def reset(self) -> None: ... def getstate(self) -> tuple[bytes, int]: ... def setstate(self, state: tuple[bytes, int]) -> None: ... @@ -180,24 +189,24 @@ class IncrementalDecoder: # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): buffer: str - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def _buffer_encode(self, input: str, errors: str, final: bool) -> bytes: ... - def encode(self, input: str, final: bool = ...) -> bytes: ... + def _buffer_encode(self, input: str, errors: str, final: bool) -> tuple[bytes, int]: ... + def encode(self, input: str, final: bool = False) -> bytes: ... class BufferedIncrementalDecoder(IncrementalDecoder): buffer: bytes - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... - def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... # TODO: it is not possible to specify the requirement that all other # attributes and methods are passed-through from the stream. class StreamWriter(Codec): stream: _WritableStream errors: str - def __init__(self, stream: _WritableStream, errors: str = ...) -> None: ... + def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ... def write(self, object: str) -> None: ... def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... @@ -208,10 +217,10 @@ class StreamWriter(Codec): class StreamReader(Codec): stream: _ReadableStream errors: str - def __init__(self, stream: _ReadableStream, errors: str = ...) -> None: ... - def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> str: ... - def readline(self, size: int | None = ..., keepends: bool = ...) -> str: ... - def readlines(self, sizehint: int | None = ..., keepends: bool = ...) -> list[str]: ... + def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ... + def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: ... + def readline(self, size: int | None = None, keepends: bool = True) -> str: ... + def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: ... def reset(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... @@ -223,16 +232,16 @@ class StreamReader(Codec): # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): stream: _Stream - def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ...) -> None: ... - def read(self, size: int = ...) -> str: ... - def readline(self, size: int | None = ...) -> str: ... - def readlines(self, sizehint: int | None = ...) -> list[str]: ... 
+    def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: ...
+    def read(self, size: int = -1) -> str: ...
+    def readline(self, size: int | None = None) -> str: ...
+    def readlines(self, sizehint: int | None = None) -> list[str]: ...
     def __next__(self) -> str: ...
     def __iter__(self: Self) -> Self: ...
     def write(self, data: str) -> None: ...  # type: ignore[override]
     def writelines(self, list: Iterable[str]) -> None: ...
     def reset(self) -> None: ...
-    def seek(self, offset: int, whence: int = ...) -> None: ...  # type: ignore[override]
+    def seek(self, offset: int, whence: int = 0) -> None: ...  # type: ignore[override]
     def __enter__(self: Self) -> Self: ...
     def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ...
     def __getattr__(self, name: str) -> Any: ...
@@ -250,11 +259,17 @@ class StreamReaderWriter(TextIO):

 class StreamRecoder(BinaryIO):
     def __init__(
-        self, stream: _Stream, encode: _Encoder, decode: _Decoder, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ...
+        self,
+        stream: _Stream,
+        encode: _Encoder,
+        decode: _Decoder,
+        Reader: _StreamReader,
+        Writer: _StreamWriter,
+        errors: str = "strict",
     ) -> None: ...
-    def read(self, size: int = ...) -> bytes: ...
-    def readline(self, size: int | None = ...) -> bytes: ...
-    def readlines(self, sizehint: int | None = ...) -> list[bytes]: ...
+    def read(self, size: int = -1) -> bytes: ...
+    def readline(self, size: int | None = None) -> bytes: ...
+    def readlines(self, sizehint: int | None = None) -> list[bytes]: ...
     def __next__(self) -> bytes: ...
     def __iter__(self: Self) -> Self: ...
     def write(self, data: bytes) -> None: ...  # type: ignore[override]
@@ -263,7 +278,7 @@ class StreamRecoder(BinaryIO):
     def __getattr__(self, name: str) -> Any: ...
     def __enter__(self: Self) -> Self: ...
     def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ...
-    def seek(self, offset: int, whence: int = ...) -> None: ...  # type: ignore[override]
+    def seek(self, offset: int, whence: int = 0) -> None: ...  # type: ignore[override]
     # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO
     # interface. At runtime, they are delegated through __getattr__.
     def close(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/codeop.pyi b/mypy/typeshed/stdlib/codeop.pyi
index 36af1d297548..6a51b7786384 100644
--- a/mypy/typeshed/stdlib/codeop.pyi
+++ b/mypy/typeshed/stdlib/codeop.pyi
@@ -2,7 +2,7 @@ from types import CodeType

 __all__ = ["compile_command", "Compile", "CommandCompiler"]

-def compile_command(source: str, filename: str = ..., symbol: str = ...) -> CodeType | None: ...
+def compile_command(source: str, filename: str = "<input>", symbol: str = "single") -> CodeType | None: ...

 class Compile:
     flags: int
@@ -10,4 +10,4 @@ class Compile:

 class CommandCompiler:
     compiler: Compile
-    def __call__(self, source: str, filename: str = ..., symbol: str = ...) -> CodeType | None: ...
+    def __call__(self, source: str, filename: str = "<input>", symbol: str = "single") -> CodeType | None: ...
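[Illustration, not part of the patch] A minimal sketch exercising the incremental codec defaults (errors="strict", final=False) that the codecs hunks above make explicit; the variable names are invented.

    import codecs
    encoder = codecs.getincrementalencoder("utf-8")()   # errors defaults to "strict"
    payload = encoder.encode("héllo")                    # final defaults to False
    payload += encoder.encode("", final=True)            # flush at end of stream
    print(payload)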
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 2955aa3b3cd0..d4c537b1384e 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -40,18 +40,18 @@ def namedtuple( typename: str, field_names: str | Iterable[str], *, - rename: bool = ..., - module: str | None = ..., - defaults: Iterable[Any] | None = ..., + rename: bool = False, + module: str | None = None, + defaults: Iterable[Any] | None = None, ) -> type[tuple[Any, ...]]: ... class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): data: dict[_KT, _VT] # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics @overload - def __init__(self, __dict: None = ...) -> None: ... + def __init__(self, __dict: None = None) -> None: ... @overload - def __init__(self: UserDict[str, _VT], __dict: None = ..., **kwargs: _VT) -> None: ... + def __init__(self: UserDict[str, _VT], __dict: None = None, **kwargs: _VT) -> None: ... @overload def __init__(self, __dict: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... @overload @@ -76,7 +76,7 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> UserDict[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> UserDict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... @@ -92,7 +92,7 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): class UserList(MutableSequence[_T]): data: list[_T] @overload - def __init__(self, initlist: None = ...) -> None: ... + def __init__(self, initlist: None = None) -> None: ... @overload def __init__(self, initlist: Iterable[_T]) -> None: ... def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ... @@ -119,7 +119,7 @@ class UserList(MutableSequence[_T]): def __imul__(self: Self, n: int) -> Self: ... def append(self, item: _T) -> None: ... def insert(self, i: int, item: _T) -> None: ... - def pop(self, i: int = ...) -> _T: ... + def pop(self, i: int = -1) -> _T: ... def remove(self, item: _T) -> None: ... def copy(self: Self) -> Self: ... def __copy__(self: Self) -> Self: ... @@ -163,18 +163,18 @@ class UserString(Sequence[UserString]): def capitalize(self: Self) -> Self: ... def casefold(self: Self) -> Self: ... def center(self: Self, width: int, *args: Any) -> Self: ... - def count(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + def count(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... if sys.version_info >= (3, 8): - def encode(self: UserString, encoding: str | None = ..., errors: str | None = ...) -> bytes: ... + def encode(self: UserString, encoding: str | None = "utf-8", errors: str | None = "strict") -> bytes: ... else: - def encode(self: Self, encoding: str | None = ..., errors: str | None = ...) -> Self: ... + def encode(self: Self, encoding: str | None = None, errors: str | None = None) -> Self: ... - def endswith(self, suffix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... - def expandtabs(self: Self, tabsize: int = ...) -> Self: ... - def find(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... 
+ def endswith(self, suffix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def expandtabs(self: Self, tabsize: int = 8) -> Self: ... + def find(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... def format(self, *args: Any, **kwds: Any) -> str: ... def format_map(self, mapping: Mapping[str, Any]) -> str: ... - def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... + def index(self, sub: str, start: int = 0, end: int = sys.maxsize) -> int: ... def isalpha(self) -> bool: ... def isalnum(self) -> bool: ... def isdecimal(self) -> bool: ... @@ -190,29 +190,24 @@ class UserString(Sequence[UserString]): def join(self, seq: Iterable[str]) -> str: ... def ljust(self: Self, width: int, *args: Any) -> Self: ... def lower(self: Self) -> Self: ... - def lstrip(self: Self, chars: str | None = ...) -> Self: ... - @staticmethod - @overload - def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... - @staticmethod - @overload - def maketrans(x: str, y: str, z: str = ...) -> dict[int, int | None]: ... + def lstrip(self: Self, chars: str | None = None) -> Self: ... + maketrans = str.maketrans def partition(self, sep: str) -> tuple[str, str, str]: ... if sys.version_info >= (3, 9): def removeprefix(self: Self, __prefix: str | UserString) -> Self: ... def removesuffix(self: Self, __suffix: str | UserString) -> Self: ... - def replace(self: Self, old: str | UserString, new: str | UserString, maxsplit: int = ...) -> Self: ... - def rfind(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... - def rindex(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + def replace(self: Self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ... + def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... def rjust(self: Self, width: int, *args: Any) -> Self: ... def rpartition(self, sep: str) -> tuple[str, str, str]: ... - def rstrip(self: Self, chars: str | None = ...) -> Self: ... - def split(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... - def rsplit(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... - def splitlines(self, keepends: bool = ...) -> list[str]: ... - def startswith(self, prefix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... - def strip(self: Self, chars: str | None = ...) -> Self: ... + def rstrip(self: Self, chars: str | None = None) -> Self: ... + def split(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def rsplit(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def splitlines(self, keepends: bool = False) -> list[str]: ... + def startswith(self, prefix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def strip(self: Self, chars: str | None = None) -> Self: ... def swapcase(self: Self) -> Self: ... def title(self: Self) -> Self: ... def translate(self: Self, *args: Any) -> Self: ... @@ -259,20 +254,20 @@ class deque(MutableSequence[_T], Generic[_T]): class Counter(dict[_T, int], Generic[_T]): @overload - def __init__(self, __iterable: None = ...) -> None: ... + def __init__(self, __iterable: None = None) -> None: ... @overload - def __init__(self: Counter[str], __iterable: None = ..., **kwargs: int) -> None: ... 
+ def __init__(self: Counter[str], __iterable: None = None, **kwargs: int) -> None: ... @overload def __init__(self, __mapping: SupportsKeysAndGetItem[_T, int]) -> None: ... @overload def __init__(self, __iterable: Iterable[_T]) -> None: ... def copy(self: Self) -> Self: ... def elements(self) -> Iterator[_T]: ... - def most_common(self, n: int | None = ...) -> list[tuple[_T, int]]: ... + def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ... @classmethod - def fromkeys(cls, iterable: Any, v: int | None = ...) -> NoReturn: ... # type: ignore[override] + def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override] @overload - def subtract(self, __iterable: None = ...) -> None: ... + def subtract(self, __iterable: None = None) -> None: ... @overload def subtract(self, __mapping: Mapping[_T, int]) -> None: ... @overload @@ -341,8 +336,8 @@ class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT def __reversed__(self) -> Iterator[_VT_co]: ... class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): - def popitem(self, last: bool = ...) -> tuple[_KT, _VT]: ... - def move_to_end(self, key: _KT, last: bool = ...) -> None: ... + def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... + def move_to_end(self, key: _KT, last: bool = True) -> None: ... def copy(self: Self) -> Self: ... def __reversed__(self) -> Iterator[_KT]: ... def keys(self) -> _odict_keys[_KT, _VT]: ... @@ -353,13 +348,13 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> OrderedDict[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload - def setdefault(self: OrderedDict[_KT, _T | None], key: _KT) -> _T | None: ... + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... @@ -398,7 +393,7 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): maps: list[MutableMapping[_KT, _VT]] def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... - def new_child(self: Self, m: MutableMapping[_KT, _VT] | None = ...) -> Self: ... + def new_child(self: Self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... @property def parents(self: Self) -> Self: ... def __setitem__(self, key: _KT, value: _VT) -> None: ... @@ -409,7 +404,11 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __contains__(self, key: object) -> bool: ... def __missing__(self, key: _KT) -> _VT: ... # undocumented def __bool__(self) -> bool: ... - def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: ... 
@overload diff --git a/mypy/typeshed/stdlib/compileall.pyi b/mypy/typeshed/stdlib/compileall.pyi index 4621500eda96..7520c2f5b676 100644 --- a/mypy/typeshed/stdlib/compileall.pyi +++ b/mypy/typeshed/stdlib/compileall.pyi @@ -11,101 +11,101 @@ class _SupportsSearch(Protocol): if sys.version_info >= (3, 10): def compile_dir( dir: StrPath, - maxlevels: int | None = ..., - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - workers: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, *, - stripdir: StrPath | None = ..., - prependdir: StrPath | None = ..., - limit_sl_dest: StrPath | None = ..., - hardlink_dupes: bool = ..., + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, ) -> int: ... def compile_file( fullname: StrPath, - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, *, - stripdir: StrPath | None = ..., - prependdir: StrPath | None = ..., - limit_sl_dest: StrPath | None = ..., - hardlink_dupes: bool = ..., + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, ) -> int: ... elif sys.version_info >= (3, 9): def compile_dir( dir: StrPath, - maxlevels: int | None = ..., - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - workers: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, *, - stripdir: str | None = ..., # https://bugs.python.org/issue40447 - prependdir: StrPath | None = ..., - limit_sl_dest: StrPath | None = ..., - hardlink_dupes: bool = ..., + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, ) -> int: ... 
def compile_file( fullname: StrPath, - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, *, - stripdir: str | None = ..., # https://bugs.python.org/issue40447 - prependdir: StrPath | None = ..., - limit_sl_dest: StrPath | None = ..., - hardlink_dupes: bool = ..., + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, ) -> int: ... else: def compile_dir( dir: StrPath, - maxlevels: int = ..., - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - workers: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + maxlevels: int = 10, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, ) -> int: ... def compile_file( fullname: StrPath, - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, ) -> int: ... def compile_path( skip_curdir: bool = ..., - maxlevels: int = ..., - force: bool = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + maxlevels: int = 0, + force: bool = False, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, ) -> int: ... diff --git a/mypy/typeshed/stdlib/concurrent/futures/_base.pyi b/mypy/typeshed/stdlib/concurrent/futures/_base.pyi index 3db968878498..64084a884433 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/_base.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/_base.pyi @@ -1,6 +1,6 @@ import sys import threading -from _typeshed import Self +from _typeshed import Self, Unused from collections.abc import Callable, Iterable, Iterator, Sequence from logging import Logger from types import TracebackType @@ -40,10 +40,10 @@ class Future(Generic[_T]): def running(self) -> bool: ... def done(self) -> bool: ... def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: ... - def result(self, timeout: float | None = ...) -> _T: ... + def result(self, timeout: float | None = None) -> _T: ... def set_running_or_notify_cancel(self) -> bool: ... def set_result(self, result: _T) -> None: ... - def exception(self, timeout: float | None = ...) -> BaseException | None: ... + def exception(self, timeout: float | None = None) -> BaseException | None: ... def set_exception(self, exception: BaseException | None) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... 
@@ -55,19 +55,19 @@ class Executor: def submit(self, fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... def map( - self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = ..., chunksize: int = ... + self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 ) -> Iterator[_T]: ... if sys.version_info >= (3, 9): - def shutdown(self, wait: bool = ..., *, cancel_futures: bool = ...) -> None: ... + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... else: - def shutdown(self, wait: bool = ...) -> None: ... + def shutdown(self, wait: bool = True) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... -def as_completed(fs: Iterable[Future[_T]], timeout: float | None = ...) -> Iterator[Future[_T]]: ... +def as_completed(fs: Iterable[Future[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: ... # Ideally this would be a namedtuple, but mypy doesn't support generic tuple types. See #1976 class DoneAndNotDoneFutures(Sequence[set[Future[_T]]]): @@ -84,7 +84,9 @@ class DoneAndNotDoneFutures(Sequence[set[Future[_T]]]): @overload def __getitem__(self, __s: slice) -> DoneAndNotDoneFutures[_T]: ... -def wait(fs: Iterable[Future[_T]], timeout: float | None = ..., return_when: str = ...) -> DoneAndNotDoneFutures[_T]: ... +def wait( + fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" +) -> DoneAndNotDoneFutures[_T]: ... class _Waiter: event: threading.Event @@ -108,4 +110,4 @@ class _AcquireFutures: futures: Iterable[Future[Any]] def __init__(self, futures: Iterable[Future[Any]]) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... diff --git a/mypy/typeshed/stdlib/concurrent/futures/process.pyi b/mypy/typeshed/stdlib/concurrent/futures/process.pyi index a98702d095a2..85af2e7f84c7 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/process.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/process.pyi @@ -55,10 +55,10 @@ class _ResultItem: if sys.version_info >= (3, 11): exit_pid: int | None def __init__( - self, work_id: int, exception: Exception | None = ..., result: Any | None = ..., exit_pid: int | None = ... + self, work_id: int, exception: Exception | None = None, result: Any | None = None, exit_pid: int | None = None ) -> None: ... else: - def __init__(self, work_id: int, exception: Exception | None = ..., result: Any | None = ...) -> None: ... + def __init__(self, work_id: int, exception: Exception | None = None, result: Any | None = None) -> None: ... class _CallItem: work_id: int @@ -74,7 +74,7 @@ class _SafeQueue(Queue[Future[Any]]): if sys.version_info >= (3, 9): def __init__( self, - max_size: int | None = ..., + max_size: int | None = 0, *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]], @@ -83,7 +83,7 @@ class _SafeQueue(Queue[Future[Any]]): ) -> None: ... else: def __init__( - self, max_size: int | None = ..., *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]] + self, max_size: int | None = 0, *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]] ) -> None: ... def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... 
@@ -95,14 +95,14 @@ if sys.version_info >= (3, 11): def _sendback_result( result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, - result: Any | None = ..., - exception: Exception | None = ..., - exit_pid: int | None = ..., + result: Any | None = None, + exception: Exception | None = None, + exit_pid: int | None = None, ) -> None: ... else: def _sendback_result( - result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = ..., exception: Exception | None = ... + result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None ) -> None: ... if sys.version_info >= (3, 11): @@ -111,7 +111,7 @@ if sys.version_info >= (3, 11): result_queue: SimpleQueue[_ResultItem], initializer: Callable[..., object] | None, initargs: tuple[Any, ...], - max_tasks: int | None = ..., + max_tasks: int | None = None, ) -> None: ... else: @@ -171,19 +171,19 @@ class ProcessPoolExecutor(Executor): if sys.version_info >= (3, 11): def __init__( self, - max_workers: int | None = ..., - mp_context: BaseContext | None = ..., - initializer: Callable[..., object] | None = ..., + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[..., object] | None = None, initargs: tuple[Any, ...] = ..., *, - max_tasks_per_child: int | None = ..., + max_tasks_per_child: int | None = None, ) -> None: ... else: def __init__( self, - max_workers: int | None = ..., - mp_context: BaseContext | None = ..., - initializer: Callable[..., object] | None = ..., + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[..., object] | None = None, initargs: tuple[Any, ...] = ..., ) -> None: ... if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/concurrent/futures/thread.pyi b/mypy/typeshed/stdlib/concurrent/futures/thread.pyi index 387ce0d7e438..e43dd3dfa33a 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/thread.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/thread.pyi @@ -50,9 +50,9 @@ class ThreadPoolExecutor(Executor): _work_queue: queue.SimpleQueue[_WorkItem[Any]] def __init__( self, - max_workers: int | None = ..., - thread_name_prefix: str = ..., - initializer: Callable[..., object] | None = ..., + max_workers: int | None = None, + thread_name_prefix: str = "", + initializer: Callable[..., object] | None = None, initargs: tuple[Any, ...] = ..., ) -> None: ... def _adjust_thread_count(self) -> None: ... diff --git a/mypy/typeshed/stdlib/configparser.pyi b/mypy/typeshed/stdlib/configparser.pyi index 00a23588b602..2c5b68385767 100644 --- a/mypy/typeshed/stdlib/configparser.pyi +++ b/mypy/typeshed/stdlib/configparser.pyi @@ -65,32 +65,48 @@ class RawConfigParser(_Parser): @overload def __init__( self, - defaults: Mapping[str, str | None] | None = ..., + defaults: Mapping[str, str | None] | None = None, dict_type: type[Mapping[str, str]] = ..., - allow_no_value: Literal[True] = ..., *, + allow_no_value: Literal[True], delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., - inline_comment_prefixes: Sequence[str] | None = ..., - strict: bool = ..., - empty_lines_in_values: bool = ..., - default_section: str = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", interpolation: Interpolation | None = ..., converters: _ConvertersMap = ..., ) -> None: ... 
@overload def __init__( self, - defaults: _Section | None = ..., + defaults: Mapping[str, str | None] | None, + dict_type: type[Mapping[str, str]], + allow_no_value: Literal[True], + *, + delimiters: Sequence[str] = ..., + comment_prefixes: Sequence[str] = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + @overload + def __init__( + self, + defaults: _Section | None = None, dict_type: type[Mapping[str, str]] = ..., - allow_no_value: bool = ..., + allow_no_value: bool = False, *, delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., - inline_comment_prefixes: Sequence[str] | None = ..., - strict: bool = ..., - empty_lines_in_values: bool = ..., - default_section: str = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", interpolation: Interpolation | None = ..., converters: _ConvertersMap = ..., ) -> None: ... @@ -106,30 +122,30 @@ class RawConfigParser(_Parser): def has_section(self, section: str) -> bool: ... def options(self, section: str) -> list[str]: ... def has_option(self, section: str, option: str) -> bool: ... - def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = ...) -> list[str]: ... - def read_file(self, f: Iterable[str], source: str | None = ...) -> None: ... - def read_string(self, string: str, source: str = ...) -> None: ... - def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = ...) -> None: ... - def readfp(self, fp: Iterable[str], filename: str | None = ...) -> None: ... + def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: ... + def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... + def read_string(self, string: str, source: str = "") -> None: ... + def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: ... + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload - def getint(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> int: ... + def getint(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> int: ... @overload def getint( - self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T = ... + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> int | _T: ... @overload - def getfloat(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> float: ... + def getfloat(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> float: ... @overload def getfloat( - self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T = ... + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> float | _T: ... @overload - def getboolean(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> bool: ... 
+ def getboolean(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> bool: ... @overload def getboolean( - self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T = ... + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> bool | _T: ... def _get_conv( self, @@ -137,21 +153,23 @@ class RawConfigParser(_Parser): option: str, conv: Callable[[str], _T], *, - raw: bool = ..., - vars: _Section | None = ..., + raw: bool = False, + vars: _Section | None = None, fallback: _T = ..., ) -> _T: ... # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> str | Any: ... + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | Any: ... @overload - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T) -> str | _T | Any: ... + def get( + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T + ) -> str | _T | Any: ... @overload - def items(self, *, raw: bool = ..., vars: _Section | None = ...) -> ItemsView[str, SectionProxy]: ... + def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: ... @overload - def items(self, section: str, raw: bool = ..., vars: _Section | None = ...) -> list[tuple[str, str]]: ... - def set(self, section: str, option: str, value: str | None = ...) -> None: ... - def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = ...) -> None: ... + def items(self, section: str, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... + def set(self, section: str, option: str, value: str | None = None) -> None: ... + def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: ... def remove_option(self, section: str, option: str) -> bool: ... def remove_section(self, section: str) -> bool: ... def optionxform(self, optionstr: str) -> str: ... @@ -159,9 +177,9 @@ class RawConfigParser(_Parser): class ConfigParser(RawConfigParser): # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> str: ... + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... @overload - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T) -> str | _T: ... + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T) -> str | _T: ... if sys.version_info < (3, 12): class SafeConfigParser(ConfigParser): ... # deprecated alias @@ -181,11 +199,11 @@ class SectionProxy(MutableMapping[str, str]): def get( # type: ignore[override] self, option: str, - fallback: str | None = ..., + fallback: str | None = None, *, - raw: bool = ..., - vars: _Section | None = ..., - _impl: Any | None = ..., + raw: bool = False, + vars: _Section | None = None, + _impl: Any | None = None, **kwargs: Any, ) -> str | Any: ... 
# can be None in RawConfigParser's sections # These are partially-applied version of the methods with the same names in @@ -216,7 +234,7 @@ class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): class Error(Exception): message: str - def __init__(self, msg: str = ...) -> None: ... + def __init__(self, msg: str = "") -> None: ... class NoSectionError(Error): section: str @@ -226,14 +244,14 @@ class DuplicateSectionError(Error): section: str source: str | None lineno: int | None - def __init__(self, section: str, source: str | None = ..., lineno: int | None = ...) -> None: ... + def __init__(self, section: str, source: str | None = None, lineno: int | None = None) -> None: ... class DuplicateOptionError(Error): section: str option: str source: str | None lineno: int | None - def __init__(self, section: str, option: str, source: str | None = ..., lineno: int | None = ...) -> None: ... + def __init__(self, section: str, option: str, source: str | None = None, lineno: int | None = None) -> None: ... class NoOptionError(Error): section: str @@ -257,7 +275,7 @@ class InterpolationSyntaxError(InterpolationError): ... class ParsingError(Error): source: str errors: list[tuple[int, str]] - def __init__(self, source: str | None = ..., filename: str | None = ...) -> None: ... + def __init__(self, source: str | None = None, filename: str | None = None) -> None: ... def append(self, lineno: int, line: str) -> None: ... class MissingSectionHeaderError(ParsingError): diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi index 1a6642b643e3..522285abbc72 100644 --- a/mypy/typeshed/stdlib/contextlib.pyi +++ b/mypy/typeshed/stdlib/contextlib.pyi @@ -1,6 +1,6 @@ import abc import sys -from _typeshed import FileDescriptorOrPath, Self +from _typeshed import FileDescriptorOrPath, Self, Unused from abc import abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType @@ -108,7 +108,7 @@ _SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) class closing(AbstractContextManager[_SupportsCloseT]): def __init__(self, thing: _SupportsCloseT) -> None: ... - def __exit__(self, *exc_info: object) -> None: ... + def __exit__(self, *exc_info: Unused) -> None: ... if sys.version_info >= (3, 10): class _SupportsAclose(Protocol): @@ -117,7 +117,7 @@ if sys.version_info >= (3, 10): class aclosing(AbstractAsyncContextManager[_SupportsAcloseT]): def __init__(self, thing: _SupportsAcloseT) -> None: ... - async def __aexit__(self, *exc_info: object) -> None: ... + async def __aexit__(self, *exc_info: Unused) -> None: ... class suppress(AbstractContextManager[None]): def __init__(self, *exceptions: type[BaseException]) -> None: ... @@ -174,23 +174,23 @@ if sys.version_info >= (3, 10): class nullcontext(AbstractContextManager[_T], AbstractAsyncContextManager[_T]): enter_result: _T @overload - def __init__(self: nullcontext[None], enter_result: None = ...) -> None: ... + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @overload def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... def __enter__(self) -> _T: ... - def __exit__(self, *exctype: object) -> None: ... + def __exit__(self, *exctype: Unused) -> None: ... async def __aenter__(self) -> _T: ... - async def __aexit__(self, *exctype: object) -> None: ... + async def __aexit__(self, *exctype: Unused) -> None: ... 
else: class nullcontext(AbstractContextManager[_T]): enter_result: _T @overload - def __init__(self: nullcontext[None], enter_result: None = ...) -> None: ... + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @overload def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... def __enter__(self) -> _T: ... - def __exit__(self, *exctype: object) -> None: ... + def __exit__(self, *exctype: Unused) -> None: ... if sys.version_info >= (3, 11): _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) @@ -199,4 +199,4 @@ if sys.version_info >= (3, 11): path: _T_fd_or_any_path def __init__(self, path: _T_fd_or_any_path) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *excinfo: object) -> None: ... + def __exit__(self, *excinfo: Unused) -> None: ... diff --git a/mypy/typeshed/stdlib/copy.pyi b/mypy/typeshed/stdlib/copy.pyi index b53f418b3930..f68965d3dc91 100644 --- a/mypy/typeshed/stdlib/copy.pyi +++ b/mypy/typeshed/stdlib/copy.pyi @@ -8,7 +8,7 @@ _T = TypeVar("_T") PyStringMap: Any # Note: memo and _nil are internal kwargs. -def deepcopy(x: _T, memo: dict[int, Any] | None = ..., _nil: Any = ...) -> _T: ... +def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = ...) -> _T: ... def copy(x: _T) -> _T: ... class Error(Exception): ... diff --git a/mypy/typeshed/stdlib/copyreg.pyi b/mypy/typeshed/stdlib/copyreg.pyi index 4403550b587e..07338b422385 100644 --- a/mypy/typeshed/stdlib/copyreg.pyi +++ b/mypy/typeshed/stdlib/copyreg.pyi @@ -10,7 +10,7 @@ __all__ = ["pickle", "constructor", "add_extension", "remove_extension", "clear_ def pickle( ob_type: type[_T], pickle_function: Callable[[_T], str | _Reduce[_T]], - constructor_ob: Callable[[_Reduce[_T]], _T] | None = ..., + constructor_ob: Callable[[_Reduce[_T]], _T] | None = None, ) -> None: ... def constructor(object: Callable[[_Reduce[_T]], _T]) -> None: ... def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... diff --git a/mypy/typeshed/stdlib/crypt.pyi b/mypy/typeshed/stdlib/crypt.pyi index 83ad45d5c155..1ad0a384eae7 100644 --- a/mypy/typeshed/stdlib/crypt.pyi +++ b/mypy/typeshed/stdlib/crypt.pyi @@ -8,5 +8,5 @@ if sys.platform != "win32": METHOD_SHA512: _Method METHOD_BLOWFISH: _Method methods: list[_Method] - def mksalt(method: _Method | None = ..., *, rounds: int | None = ...) -> str: ... - def crypt(word: str, salt: str | _Method | None = ...) -> str: ... + def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ... + def crypt(word: str, salt: str | _Method | None = None) -> str: ... 
diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi index 8802d6b0a5f5..13b483b219d5 100644 --- a/mypy/typeshed/stdlib/csv.pyi +++ b/mypy/typeshed/stdlib/csv.pyi @@ -76,9 +76,9 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): self, f: Iterable[str], fieldnames: Sequence[_T], - restkey: str | None = ..., - restval: str | None = ..., - dialect: _DialectLike = ..., + restkey: str | None = None, + restval: str | None = None, + dialect: _DialectLike = "excel", *, delimiter: str = ..., quotechar: str | None = ..., @@ -93,10 +93,10 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): def __init__( self: DictReader[str], f: Iterable[str], - fieldnames: Sequence[str] | None = ..., - restkey: str | None = ..., - restval: str | None = ..., - dialect: _DialectLike = ..., + fieldnames: Sequence[str] | None = None, + restkey: str | None = None, + restval: str | None = None, + dialect: _DialectLike = "excel", *, delimiter: str = ..., quotechar: str | None = ..., @@ -121,9 +121,9 @@ class DictWriter(Generic[_T]): self, f: SupportsWrite[str], fieldnames: Collection[_T], - restval: Any | None = ..., - extrasaction: Literal["raise", "ignore"] = ..., - dialect: _DialectLike = ..., + restval: Any | None = "", + extrasaction: Literal["raise", "ignore"] = "raise", + dialect: _DialectLike = "excel", *, delimiter: str = ..., quotechar: str | None = ..., @@ -146,5 +146,5 @@ class DictWriter(Generic[_T]): class Sniffer: preferred: list[str] - def sniff(self, sample: str, delimiters: str | None = ...) -> type[Dialect]: ... + def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: ... def has_header(self, sample: str) -> bool: ... diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 2e26a08f81f9..cd31a36a354d 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -26,14 +26,19 @@ class CDLL: self, name: str | None, mode: int = ..., - handle: int | None = ..., - use_errno: bool = ..., - use_last_error: bool = ..., - winmode: int | None = ..., + handle: int | None = None, + use_errno: bool = False, + use_last_error: bool = False, + winmode: int | None = None, ) -> None: ... else: def __init__( - self, name: str | None, mode: int = ..., handle: int | None = ..., use_errno: bool = ..., use_last_error: bool = ... + self, + name: str | None, + mode: int = ..., + handle: int | None = None, + use_errno: bool = False, + use_last_error: bool = False, ) -> None: ... def __getattr__(self, name: str) -> _NamedFuncPointer: ... @@ -68,7 +73,7 @@ class _CDataMeta(type): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _CData(metaclass=_CDataMeta): - _b_base: int + _b_base_: int _b_needsfree_: bool _objects: Mapping[Any, int] | None @classmethod @@ -136,11 +141,11 @@ def byref(obj: _CData, offset: int = ...) -> _CArgObject: ... _CastT = TypeVar("_CastT", bound=_CanCastTo) def cast(obj: _CData | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... -def create_string_buffer(init: int | bytes, size: int | None = ...) -> Array[c_char]: ... +def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: ... c_buffer = create_string_buffer -def create_unicode_buffer(init: int | str, size: int | None = ...) -> Array[c_wchar]: ... +def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... 
if sys.platform == "win32": def DllCanUnloadNow() -> int: ... @@ -178,12 +183,12 @@ if sys.platform == "win32": def set_last_error(value: int) -> int: ... def sizeof(obj_or_type: _CData | type[_CData]) -> int: ... -def string_at(address: _CVoidConstPLike, size: int = ...) -> bytes: ... +def string_at(address: _CVoidConstPLike, size: int = -1) -> bytes: ... if sys.platform == "win32": - def WinError(code: int | None = ..., descr: str | None = ...) -> OSError: ... + def WinError(code: int | None = None, descr: str | None = None) -> OSError: ... -def wstring_at(address: _CVoidConstPLike, size: int = ...) -> str: ... +def wstring_at(address: _CVoidConstPLike, size: int = -1) -> str: ... class _SimpleCData(Generic[_T], _CData): value: _T @@ -266,7 +271,11 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - raw: bytes # Note: only available if _CT == c_char + # Note: only available if _CT == c_char + @property + def raw(self) -> bytes: ... + @raw.setter + def raw(self, value: ReadableBuffer) -> None: ... value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT diff --git a/mypy/typeshed/stdlib/curses/textpad.pyi b/mypy/typeshed/stdlib/curses/textpad.pyi index ad9983431fc7..4d28b4dfbcdc 100644 --- a/mypy/typeshed/stdlib/curses/textpad.pyi +++ b/mypy/typeshed/stdlib/curses/textpad.pyi @@ -7,7 +7,7 @@ if sys.platform != "win32": class Textbox: stripspaces: bool - def __init__(self, win: _CursesWindow, insert_mode: bool = ...) -> None: ... - def edit(self, validate: Callable[[int], int] | None = ...) -> str: ... + def __init__(self, win: _CursesWindow, insert_mode: bool = False) -> None: ... + def edit(self, validate: Callable[[int], int] | None = None) -> str: ... def do_command(self, ch: str | int) -> None: ... def gather(self) -> str: ... diff --git a/mypy/typeshed/stdlib/dataclasses.pyi b/mypy/typeshed/stdlib/dataclasses.pyi index 560147f9e96b..3b7327137ec5 100644 --- a/mypy/typeshed/stdlib/dataclasses.pyi +++ b/mypy/typeshed/stdlib/dataclasses.pyi @@ -3,8 +3,8 @@ import sys import types from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Mapping -from typing import Any, Generic, Protocol, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing import Any, ClassVar, Generic, Protocol, TypeVar, overload +from typing_extensions import Literal, TypeAlias, TypeGuard if sys.version_info >= (3, 9): from types import GenericAlias @@ -30,6 +30,11 @@ __all__ = [ if sys.version_info >= (3, 10): __all__ += ["KW_ONLY"] +class _DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] + +_DataclassT = TypeVar("_DataclassT", bound=_DataclassInstance) + # define _MISSING_TYPE as an enum within the type stubs, # even though that is not really its type at runtime # this allows us to use Literal[_MISSING_TYPE.MISSING] @@ -44,13 +49,13 @@ if sys.version_info >= (3, 10): class KW_ONLY: ... @overload -def asdict(obj: Any) -> dict[str, Any]: ... +def asdict(obj: _DataclassInstance) -> dict[str, Any]: ... @overload -def asdict(obj: Any, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... +def asdict(obj: _DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... 
@overload -def astuple(obj: Any) -> tuple[Any, ...]: ... +def astuple(obj: _DataclassInstance) -> tuple[Any, ...]: ... @overload -def astuple(obj: Any, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... +def astuple(obj: _DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... if sys.version_info >= (3, 8): # cls argument is now positional-only @@ -69,37 +74,43 @@ if sys.version_info >= (3, 11): @overload def dataclass( *, - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., - weakref_slot: bool = ..., + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, ) -> Callable[[type[_T]], type[_T]]: ... elif sys.version_info >= (3, 10): @overload def dataclass( *, - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, ) -> Callable[[type[_T]], type[_T]]: ... else: @overload def dataclass( - *, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ... + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, ) -> Callable[[type[_T]], type[_T]]: ... # See https://github.com/python/mypy/issues/10750 @@ -152,32 +163,32 @@ if sys.version_info >= (3, 10): def field( *, default: _T, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, kw_only: bool = ..., ) -> _T: ... @overload def field( *, default_factory: Callable[[], _T], - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, kw_only: bool = ..., ) -> _T: ... @overload def field( *, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, kw_only: bool = ..., ) -> Any: ... @@ -186,34 +197,39 @@ else: def field( *, default: _T, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, ) -> _T: ... 
@overload def field( *, default_factory: Callable[[], _T], - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, ) -> _T: ... @overload def field( *, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, ) -> Any: ... -def fields(class_or_instance: Any) -> tuple[Field[Any], ...]: ... -def is_dataclass(obj: Any) -> bool: ... +def fields(class_or_instance: _DataclassInstance | type[_DataclassInstance]) -> tuple[Field[Any], ...]: ... +@overload +def is_dataclass(obj: _DataclassInstance | type[_DataclassInstance]) -> Literal[True]: ... +@overload +def is_dataclass(obj: type) -> TypeGuard[type[_DataclassInstance]]: ... +@overload +def is_dataclass(obj: object) -> TypeGuard[_DataclassInstance | type[_DataclassInstance]]: ... class FrozenInstanceError(AttributeError): ... @@ -239,17 +255,17 @@ if sys.version_info >= (3, 11): fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, bases: tuple[type, ...] = ..., - namespace: dict[str, Any] | None = ..., - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., - weakref_slot: bool = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, ) -> type: ... elif sys.version_info >= (3, 10): @@ -258,16 +274,16 @@ elif sys.version_info >= (3, 10): fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, bases: tuple[type, ...] = ..., - namespace: dict[str, Any] | None = ..., - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, ) -> type: ... else: @@ -276,13 +292,13 @@ else: fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, bases: tuple[type, ...] = ..., - namespace: dict[str, Any] | None = ..., - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, ) -> type: ... -def replace(__obj: _T, **changes: Any) -> _T: ... +def replace(__obj: _DataclassT, **changes: Any) -> _DataclassT: ... 
diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index 43f5902c3c06..377ef0067485 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -261,11 +261,11 @@ class datetime(date): def utcfromtimestamp(cls: type[Self], __t: float) -> Self: ... if sys.version_info >= (3, 8): @classmethod - def now(cls: type[Self], tz: _TzInfo | None = ...) -> Self: ... + def now(cls: type[Self], tz: _TzInfo | None = None) -> Self: ... else: @overload @classmethod - def now(cls: type[Self], tz: None = ...) -> Self: ... + def now(cls: type[Self], tz: None = None) -> Self: ... @overload @classmethod def now(cls, tz: _TzInfo) -> datetime: ... diff --git a/mypy/typeshed/stdlib/dbm/__init__.pyi b/mypy/typeshed/stdlib/dbm/__init__.pyi index 33b8aab96610..ab224086b7be 100644 --- a/mypy/typeshed/stdlib/dbm/__init__.pyi +++ b/mypy/typeshed/stdlib/dbm/__init__.pyi @@ -92,4 +92,4 @@ class _error(Exception): ... error: tuple[type[_error], type[OSError]] def whichdb(filename: str) -> str: ... -def open(file: str, flag: _TFlags = ..., mode: int = ...) -> _Database: ... +def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... diff --git a/mypy/typeshed/stdlib/dbm/dumb.pyi b/mypy/typeshed/stdlib/dbm/dumb.pyi index 738e68968ca8..d65d163ab568 100644 --- a/mypy/typeshed/stdlib/dbm/dumb.pyi +++ b/mypy/typeshed/stdlib/dbm/dumb.pyi @@ -14,7 +14,7 @@ error = OSError # any of the three implementations of dbm (dumb, gnu, ndbm), and this # class is intended to represent the common interface supported by all three. class _Database(MutableMapping[_KeyType, bytes]): - def __init__(self, filebasename: str, mode: str, flag: str = ...) -> None: ... + def __init__(self, filebasename: str, mode: str, flag: str = "c") -> None: ... def sync(self) -> None: ... def iterkeys(self) -> Iterator[bytes]: ... # undocumented def close(self) -> None: ... @@ -29,4 +29,4 @@ class _Database(MutableMapping[_KeyType, bytes]): self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... -def open(file: str, flag: str = ..., mode: int = ...) -> _Database: ... +def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ... diff --git a/mypy/typeshed/stdlib/dbm/gnu.pyi b/mypy/typeshed/stdlib/dbm/gnu.pyi index 93b9df1077ce..adaf6fa8e69b 100644 --- a/mypy/typeshed/stdlib/dbm/gnu.pyi +++ b/mypy/typeshed/stdlib/dbm/gnu.pyi @@ -37,4 +37,4 @@ if sys.platform != "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - def open(__filename: str, __flags: str = ..., __mode: int = ...) -> _gdbm: ... + def open(__filename: str, __flags: str = "r", __mode: int = 0o666) -> _gdbm: ... diff --git a/mypy/typeshed/stdlib/dbm/ndbm.pyi b/mypy/typeshed/stdlib/dbm/ndbm.pyi index ca658098bd5c..ac0b75dfa45b 100644 --- a/mypy/typeshed/stdlib/dbm/ndbm.pyi +++ b/mypy/typeshed/stdlib/dbm/ndbm.pyi @@ -33,4 +33,4 @@ if sys.platform != "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - def open(__filename: str, __flags: str = ..., __mode: int = ...) -> _dbm: ... + def open(__filename: str, __flags: str = "r", __mode: int = 0o666) -> _dbm: ... 
diff --git a/mypy/typeshed/stdlib/difflib.pyi b/mypy/typeshed/stdlib/difflib.pyi index df2f8be0168a..310519602695 100644 --- a/mypy/typeshed/stdlib/difflib.pyi +++ b/mypy/typeshed/stdlib/difflib.pyi @@ -29,28 +29,28 @@ class Match(NamedTuple): class SequenceMatcher(Generic[_T]): @overload - def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = ...) -> None: ... + def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload - def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = ...) -> None: ... + def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload def __init__( self: SequenceMatcher[str], - isjunk: Callable[[str], bool] | None = ..., - a: Sequence[str] = ..., - b: Sequence[str] = ..., - autojunk: bool = ..., + isjunk: Callable[[str], bool] | None = None, + a: Sequence[str] = "", + b: Sequence[str] = "", + autojunk: bool = True, ) -> None: ... def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... def set_seq1(self, a: Sequence[_T]) -> None: ... def set_seq2(self, b: Sequence[_T]) -> None: ... if sys.version_info >= (3, 9): - def find_longest_match(self, alo: int = ..., ahi: int | None = ..., blo: int = ..., bhi: int | None = ...) -> Match: ... + def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... else: def find_longest_match(self, alo: int, ahi: int, blo: int, bhi: int) -> Match: ... def get_matching_blocks(self) -> list[Match]: ... def get_opcodes(self) -> list[tuple[str, int, int, int, int]]: ... - def get_grouped_opcodes(self, n: int = ...) -> Iterable[list[tuple[str, int, int, int, int]]]: ... + def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: ... def ratio(self) -> float: ... def quick_ratio(self) -> float: ... def real_quick_ratio(self) -> float: ... @@ -59,69 +59,72 @@ class SequenceMatcher(Generic[_T]): # mypy thinks the signatures of the overloads overlap, but the types still work fine @overload -def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = ..., cutoff: float = ...) -> list[AnyStr]: ... # type: ignore[misc] +def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: ... # type: ignore[misc] @overload def get_close_matches( - word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = ..., cutoff: float = ... + word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = 3, cutoff: float = 0.6 ) -> list[Sequence[_T]]: ... class Differ: - def __init__(self, linejunk: Callable[[str], bool] | None = ..., charjunk: Callable[[str], bool] | None = ...) -> None: ... + def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: ... def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... def IS_LINE_JUNK(line: str, pat: Any = ...) -> bool: ... # pat is undocumented -def IS_CHARACTER_JUNK(ch: str, ws: str = ...) -> bool: ... # ws is undocumented +def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: ... 
# ws is undocumented def unified_diff( a: Sequence[str], b: Sequence[str], - fromfile: str = ..., - tofile: str = ..., - fromfiledate: str = ..., - tofiledate: str = ..., - n: int = ..., - lineterm: str = ..., + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", ) -> Iterator[str]: ... def context_diff( a: Sequence[str], b: Sequence[str], - fromfile: str = ..., - tofile: str = ..., - fromfiledate: str = ..., - tofiledate: str = ..., - n: int = ..., - lineterm: str = ..., + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", ) -> Iterator[str]: ... def ndiff( - a: Sequence[str], b: Sequence[str], linejunk: Callable[[str], bool] | None = ..., charjunk: Callable[[str], bool] | None = ... + a: Sequence[str], + b: Sequence[str], + linejunk: Callable[[str], bool] | None = None, + charjunk: Callable[[str], bool] | None = ..., ) -> Iterator[str]: ... class HtmlDiff: def __init__( self, - tabsize: int = ..., - wrapcolumn: int | None = ..., - linejunk: Callable[[str], bool] | None = ..., + tabsize: int = 8, + wrapcolumn: int | None = None, + linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = ..., ) -> None: ... def make_file( self, fromlines: Sequence[str], tolines: Sequence[str], - fromdesc: str = ..., - todesc: str = ..., - context: bool = ..., - numlines: int = ..., + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, *, - charset: str = ..., + charset: str = "utf-8", ) -> str: ... def make_table( self, fromlines: Sequence[str], tolines: Sequence[str], - fromdesc: str = ..., - todesc: str = ..., - context: bool = ..., - numlines: int = ..., + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, ) -> str: ... def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... @@ -129,10 +132,10 @@ def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], a: Iterable[bytes | bytearray], b: Iterable[bytes | bytearray], - fromfile: bytes | bytearray = ..., - tofile: bytes | bytearray = ..., - fromfiledate: bytes | bytearray = ..., - tofiledate: bytes | bytearray = ..., - n: int = ..., - lineterm: bytes | bytearray = ..., + fromfile: bytes | bytearray = b"", + tofile: bytes | bytearray = b"", + fromfiledate: bytes | bytearray = b"", + tofiledate: bytes | bytearray = b"", + n: int = 3, + lineterm: bytes | bytearray = b"\n", ) -> Iterator[bytes]: ... diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index 73adba5c19f5..ea837f09c806 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -76,17 +76,19 @@ class Bytecode: self, x: _HaveCodeType | str, *, - first_line: int | None = ..., - current_offset: int | None = ..., - show_caches: bool = ..., - adaptive: bool = ..., + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, ) -> None: ... @classmethod def from_traceback( - cls: type[Self], tb: types.TracebackType, *, show_caches: bool = ..., adaptive: bool = ... + cls: type[Self], tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False ) -> Self: ... else: - def __init__(self, x: _HaveCodeType | str, *, first_line: int | None = ..., current_offset: int | None = ...) -> None: ... 
+ def __init__( + self, x: _HaveCodeType | str, *, first_line: int | None = None, current_offset: int | None = None + ) -> None: ... @classmethod def from_traceback(cls: type[Self], tb: types.TracebackType) -> Self: ... @@ -103,37 +105,37 @@ def code_info(x: _HaveCodeType | str) -> str: ... if sys.version_info >= (3, 11): def dis( - x: _HaveCodeType | str | bytes | bytearray | None = ..., + x: _HaveCodeType | str | bytes | bytearray | None = None, *, - file: IO[str] | None = ..., - depth: int | None = ..., - show_caches: bool = ..., - adaptive: bool = ..., + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, ) -> None: ... else: def dis( - x: _HaveCodeType | str | bytes | bytearray | None = ..., *, file: IO[str] | None = ..., depth: int | None = ... + x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None ) -> None: ... if sys.version_info >= (3, 11): def disassemble( - co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: ... def disco( - co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: ... def distb( - tb: types.TracebackType | None = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: ... def get_instructions( - x: _HaveCodeType, *, first_line: int | None = ..., show_caches: bool = ..., adaptive: bool = ... + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False ) -> Iterator[Instruction]: ... else: - def disassemble(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... - def disco(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... - def distb(tb: types.TracebackType | None = ..., *, file: IO[str] | None = ...) -> None: ... - def get_instructions(x: _HaveCodeType, *, first_line: int | None = ...) -> Iterator[Instruction]: ... + def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def disco(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... + def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... -def show_code(co: _HaveCodeType, *, file: IO[str] | None = ...) -> None: ... +def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ... 
diff --git a/mypy/typeshed/stdlib/distutils/archive_util.pyi b/mypy/typeshed/stdlib/distutils/archive_util.pyi index 38458fc0e003..a8947ce35c60 100644 --- a/mypy/typeshed/stdlib/distutils/archive_util.pyi +++ b/mypy/typeshed/stdlib/distutils/archive_util.pyi @@ -1,20 +1,20 @@ def make_archive( base_name: str, format: str, - root_dir: str | None = ..., - base_dir: str | None = ..., - verbose: int = ..., - dry_run: int = ..., - owner: str | None = ..., - group: str | None = ..., + root_dir: str | None = None, + base_dir: str | None = None, + verbose: int = 0, + dry_run: int = 0, + owner: str | None = None, + group: str | None = None, ) -> str: ... def make_tarball( base_name: str, base_dir: str, - compress: str | None = ..., - verbose: int = ..., - dry_run: int = ..., - owner: str | None = ..., - group: str | None = ..., + compress: str | None = "gzip", + verbose: int = 0, + dry_run: int = 0, + owner: str | None = None, + group: str | None = None, ) -> str: ... -def make_zipfile(base_name: str, base_dir: str, verbose: int = ..., dry_run: int = ...) -> str: ... +def make_zipfile(base_name: str, base_dir: str, verbose: int = 0, dry_run: int = 0) -> str: ... diff --git a/mypy/typeshed/stdlib/distutils/ccompiler.pyi b/mypy/typeshed/stdlib/distutils/ccompiler.pyi index 5b92c5f5c42e..711b30ba4e0e 100644 --- a/mypy/typeshed/stdlib/distutils/ccompiler.pyi +++ b/mypy/typeshed/stdlib/distutils/ccompiler.pyi @@ -8,9 +8,9 @@ def gen_lib_options( compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] ) -> list[str]: ... def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... -def get_default_compiler(osname: str | None = ..., platform: str | None = ...) -> str: ... +def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... def new_compiler( - plat: str | None = ..., compiler: str | None = ..., verbose: int = ..., dry_run: int = ..., force: int = ... + plat: str | None = None, compiler: str | None = None, verbose: int = 0, dry_run: int = 0, force: int = 0 ) -> CCompiler: ... def show_compilers() -> None: ... @@ -25,7 +25,7 @@ class CCompiler: library_dirs: list[str] runtime_library_dirs: list[str] objects: list[str] - def __init__(self, verbose: int = ..., dry_run: int = ..., force: int = ...) -> None: ... + def __init__(self, verbose: int = 0, dry_run: int = 0, force: int = 0) -> None: ... def add_include_dir(self, dir: str) -> None: ... def set_include_dirs(self, dirs: list[str]) -> None: ... def add_library(self, libname: str) -> None: ... @@ -34,7 +34,7 @@ class CCompiler: def set_library_dirs(self, dirs: list[str]) -> None: ... def add_runtime_library_dir(self, dir: str) -> None: ... def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... - def define_macro(self, name: str, value: str | None = ...) -> None: ... + def define_macro(self, name: str, value: str | None = None) -> None: ... def undefine_macro(self, name: str) -> None: ... def add_link_object(self, object: str) -> None: ... def set_link_objects(self, objects: list[str]) -> None: ... @@ -43,10 +43,10 @@ class CCompiler: def has_function( self, funcname: str, - includes: list[str] | None = ..., - include_dirs: list[str] | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., + includes: list[str] | None = None, + include_dirs: list[str] | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, ) -> bool: ... 
def library_dir_option(self, dir: str) -> str: ... def library_option(self, lib: str) -> str: ... @@ -55,98 +55,98 @@ class CCompiler: def compile( self, sources: list[str], - output_dir: str | None = ..., - macros: _Macro | None = ..., - include_dirs: list[str] | None = ..., + output_dir: str | None = None, + macros: _Macro | None = None, + include_dirs: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - depends: list[str] | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + depends: list[str] | None = None, ) -> list[str]: ... def create_static_lib( self, objects: list[str], output_libname: str, - output_dir: str | None = ..., + output_dir: str | None = None, debug: bool = ..., - target_lang: str | None = ..., + target_lang: str | None = None, ) -> None: ... def link( self, target_desc: str, objects: list[str], output_filename: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - export_symbols: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - build_temp: str | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, ) -> None: ... def link_executable( self, objects: list[str], output_progname: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + target_lang: str | None = None, ) -> None: ... def link_shared_lib( self, objects: list[str], output_libname: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - export_symbols: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - build_temp: str | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, ) -> None: ... 
def link_shared_object( self, objects: list[str], output_filename: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - export_symbols: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - build_temp: str | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, ) -> None: ... def preprocess( self, source: str, - output_file: str | None = ..., - macros: list[_Macro] | None = ..., - include_dirs: list[str] | None = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., + output_file: str | None = None, + macros: list[_Macro] | None = None, + include_dirs: list[str] | None = None, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, ) -> None: ... - def executable_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... - def library_filename(self, libname: str, lib_type: str = ..., strip_dir: int = ..., output_dir: str = ...) -> str: ... - def object_filenames(self, source_filenames: list[str], strip_dir: int = ..., output_dir: str = ...) -> list[str]: ... - def shared_object_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... - def execute(self, func: Callable[..., object], args: tuple[Any, ...], msg: str | None = ..., level: int = ...) -> None: ... + def executable_filename(self, basename: str, strip_dir: int = 0, output_dir: str = "") -> str: ... + def library_filename(self, libname: str, lib_type: str = "static", strip_dir: int = 0, output_dir: str = "") -> str: ... + def object_filenames(self, source_filenames: list[str], strip_dir: int = 0, output_dir: str = "") -> list[str]: ... + def shared_object_filename(self, basename: str, strip_dir: int = 0, output_dir: str = "") -> str: ... + def execute(self, func: Callable[..., object], args: tuple[Any, ...], msg: str | None = None, level: int = 1) -> None: ... def spawn(self, cmd: list[str]) -> None: ... - def mkpath(self, name: str, mode: int = ...) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... def move_file(self, src: str, dst: str) -> str: ... - def announce(self, msg: str, level: int = ...) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... def warn(self, msg: str) -> None: ... def debug_print(self, msg: str) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/cmd.pyi b/mypy/typeshed/stdlib/distutils/cmd.pyi index e706bdbc5802..d9ffee9cb832 100644 --- a/mypy/typeshed/stdlib/distutils/cmd.pyi +++ b/mypy/typeshed/stdlib/distutils/cmd.pyi @@ -12,49 +12,43 @@ class Command: def finalize_options(self) -> None: ... @abstractmethod def run(self) -> None: ... - def announce(self, msg: str, level: int = ...) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... def debug_print(self, msg: str) -> None: ... - def ensure_string(self, option: str, default: str | None = ...) -> None: ... + def ensure_string(self, option: str, default: str | None = None) -> None: ... 
def ensure_string_list(self, option: str | list[str]) -> None: ... def ensure_filename(self, option: str) -> None: ... def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... - def get_finalized_command(self, command: str, create: int = ...) -> Command: ... - def reinitialize_command(self, command: Command | str, reinit_subcommands: int = ...) -> Command: ... + def get_finalized_command(self, command: str, create: int = 1) -> Command: ... + def reinitialize_command(self, command: Command | str, reinit_subcommands: int = 0) -> Command: ... def run_command(self, command: str) -> None: ... def get_sub_commands(self) -> list[str]: ... def warn(self, msg: str) -> None: ... - def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = ..., level: int = ...) -> None: ... - def mkpath(self, name: str, mode: int = ...) -> None: ... + def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... def copy_file( - self, - infile: str, - outfile: str, - preserve_mode: int = ..., - preserve_times: int = ..., - link: str | None = ..., - level: Any = ..., + self, infile: str, outfile: str, preserve_mode: int = 1, preserve_times: int = 1, link: str | None = None, level: Any = 1 ) -> tuple[str, bool]: ... # level is not used def copy_tree( self, infile: str, outfile: str, - preserve_mode: int = ..., - preserve_times: int = ..., - preserve_symlinks: int = ..., - level: Any = ..., + preserve_mode: int = 1, + preserve_times: int = 1, + preserve_symlinks: int = 0, + level: Any = 1, ) -> list[str]: ... # level is not used - def move_file(self, src: str, dst: str, level: Any = ...) -> str: ... # level is not used - def spawn(self, cmd: Iterable[str], search_path: int = ..., level: Any = ...) -> None: ... # level is not used + def move_file(self, src: str, dst: str, level: Any = 1) -> str: ... # level is not used + def spawn(self, cmd: Iterable[str], search_path: int = 1, level: Any = 1) -> None: ... # level is not used def make_archive( self, base_name: str, format: str, - root_dir: str | None = ..., - base_dir: str | None = ..., - owner: str | None = ..., - group: str | None = ..., + root_dir: str | None = None, + base_dir: str | None = None, + owner: str | None = None, + group: str | None = None, ) -> str: ... def make_file( self, @@ -62,7 +56,7 @@ class Command: outfile: str, func: Callable[..., object], args: list[Any], - exec_msg: str | None = ..., - skip_msg: str | None = ..., - level: Any = ..., + exec_msg: str | None = None, + skip_msg: str | None = None, + level: Any = 1, ) -> None: ... # level is not used diff --git a/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi b/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi index 66202e841d3c..fa98e86d592a 100644 --- a/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi +++ b/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi @@ -9,9 +9,9 @@ if sys.platform == "win32": class PyDialog(Dialog): def __init__(self, *args, **kw) -> None: ... def title(self, title) -> None: ... - def back(self, title, next, name: str = ..., active: int = ...): ... - def cancel(self, title, next, name: str = ..., active: int = ...): ... - def next(self, title, next, name: str = ..., active: int = ...): ... + def back(self, title, next, name: str = "Back", active: int = 1): ... 
+ def cancel(self, title, next, name: str = "Cancel", active: int = 1): ... + def next(self, title, next, name: str = "Next", active: int = 1): ... def xbutton(self, name, title, next, xpos): ... class bdist_msi(Command): diff --git a/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi b/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi index 1091fb278493..8491d3126200 100644 --- a/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi +++ b/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi @@ -11,6 +11,6 @@ class bdist_wininst(Command): def finalize_options(self) -> None: ... def run(self) -> None: ... def get_inidata(self) -> str: ... - def create_exe(self, arcname: StrOrBytesPath, fullname: str, bitmap: StrOrBytesPath | None = ...) -> None: ... + def create_exe(self, arcname: StrOrBytesPath, fullname: str, bitmap: StrOrBytesPath | None = None) -> None: ... def get_installer_filename(self, fullname: str) -> str: ... def get_exe_bytes(self) -> bytes: ... diff --git a/mypy/typeshed/stdlib/distutils/command/build_py.pyi b/mypy/typeshed/stdlib/distutils/command/build_py.pyi index 3c6e022c2a10..ca4e4ed7e797 100644 --- a/mypy/typeshed/stdlib/distutils/command/build_py.pyi +++ b/mypy/typeshed/stdlib/distutils/command/build_py.pyi @@ -32,7 +32,7 @@ class build_py(Command): def find_all_modules(self): ... def get_source_files(self): ... def get_module_outfile(self, build_dir, package, module): ... - def get_outputs(self, include_bytecode: int = ...): ... + def get_outputs(self, include_bytecode: int = 1): ... def build_module(self, module, module_file, package): ... def build_modules(self) -> None: ... def build_packages(self) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/command/config.pyi b/mypy/typeshed/stdlib/distutils/command/config.pyi index 03466ca72985..81fdf76b2b59 100644 --- a/mypy/typeshed/stdlib/distutils/command/config.pyi +++ b/mypy/typeshed/stdlib/distutils/command/config.pyi @@ -24,60 +24,60 @@ class config(Command): def run(self) -> None: ... def try_cpp( self, - body: str | None = ..., - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - lang: str = ..., + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... def search_cpp( self, pattern: Pattern[str] | str, - body: str | None = ..., - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - lang: str = ..., + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... def try_compile( - self, body: str, headers: Sequence[str] | None = ..., include_dirs: Sequence[str] | None = ..., lang: str = ... + self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" ) -> bool: ... def try_link( self, body: str, - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - libraries: Sequence[str] | None = ..., - library_dirs: Sequence[str] | None = ..., - lang: str = ..., + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... 
def try_run( self, body: str, - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - libraries: Sequence[str] | None = ..., - library_dirs: Sequence[str] | None = ..., - lang: str = ..., + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... def check_func( self, func: str, - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - libraries: Sequence[str] | None = ..., - library_dirs: Sequence[str] | None = ..., - decl: int = ..., - call: int = ..., + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + decl: int = 0, + call: int = 0, ) -> bool: ... def check_lib( self, library: str, - library_dirs: Sequence[str] | None = ..., - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., + library_dirs: Sequence[str] | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, other_libraries: list[str] = ..., ) -> bool: ... def check_header( - self, header: str, include_dirs: Sequence[str] | None = ..., library_dirs: Sequence[str] | None = ..., lang: str = ... + self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" ) -> bool: ... -def dump_file(filename: str, head: Any | None = ...) -> None: ... +def dump_file(filename: str, head: Any | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/command/register.pyi b/mypy/typeshed/stdlib/distutils/command/register.pyi index a1a7a45fb3d7..f88b94113ff4 100644 --- a/mypy/typeshed/stdlib/distutils/command/register.pyi +++ b/mypy/typeshed/stdlib/distutils/command/register.pyi @@ -15,4 +15,4 @@ class register(PyPIRCCommand): def verify_metadata(self) -> None: ... def send_metadata(self) -> None: ... def build_post_data(self, action): ... - def post_to_server(self, data, auth: Any | None = ...): ... + def post_to_server(self, data, auth: Any | None = None): ... diff --git a/mypy/typeshed/stdlib/distutils/core.pyi b/mypy/typeshed/stdlib/distutils/core.pyi index 199a4d70a953..56081f921378 100644 --- a/mypy/typeshed/stdlib/distutils/core.pyi +++ b/mypy/typeshed/stdlib/distutils/core.pyi @@ -46,4 +46,4 @@ def setup( fullname: str = ..., **attrs: Any, ) -> None: ... -def run_setup(script_name: str, script_args: list[str] | None = ..., stop_after: str = ...) -> Distribution: ... +def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: ... diff --git a/mypy/typeshed/stdlib/distutils/dep_util.pyi b/mypy/typeshed/stdlib/distutils/dep_util.pyi index 929d6ffd0c81..096ce19d4859 100644 --- a/mypy/typeshed/stdlib/distutils/dep_util.pyi +++ b/mypy/typeshed/stdlib/distutils/dep_util.pyi @@ -1,3 +1,3 @@ def newer(source: str, target: str) -> bool: ... def newer_pairwise(sources: list[str], targets: list[str]) -> list[tuple[str, str]]: ... -def newer_group(sources: list[str], target: str, missing: str = ...) -> bool: ... +def newer_group(sources: list[str], target: str, missing: str = "error") -> bool: ... 
diff --git a/mypy/typeshed/stdlib/distutils/dir_util.pyi b/mypy/typeshed/stdlib/distutils/dir_util.pyi index ffe5ff1cfbd4..2324a2d50caa 100644 --- a/mypy/typeshed/stdlib/distutils/dir_util.pyi +++ b/mypy/typeshed/stdlib/distutils/dir_util.pyi @@ -1,13 +1,13 @@ -def mkpath(name: str, mode: int = ..., verbose: int = ..., dry_run: int = ...) -> list[str]: ... -def create_tree(base_dir: str, files: list[str], mode: int = ..., verbose: int = ..., dry_run: int = ...) -> None: ... +def mkpath(name: str, mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> list[str]: ... +def create_tree(base_dir: str, files: list[str], mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> None: ... def copy_tree( src: str, dst: str, - preserve_mode: int = ..., - preserve_times: int = ..., - preserve_symlinks: int = ..., - update: int = ..., - verbose: int = ..., - dry_run: int = ..., + preserve_mode: int = 1, + preserve_times: int = 1, + preserve_symlinks: int = 0, + update: int = 0, + verbose: int = 1, + dry_run: int = 0, ) -> list[str]: ... -def remove_tree(directory: str, verbose: int = ..., dry_run: int = ...) -> None: ... +def remove_tree(directory: str, verbose: int = 1, dry_run: int = 0) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/dist.pyi b/mypy/typeshed/stdlib/distutils/dist.pyi index fc1bce261e57..b411324c4ce6 100644 --- a/mypy/typeshed/stdlib/distutils/dist.pyi +++ b/mypy/typeshed/stdlib/distutils/dist.pyi @@ -4,7 +4,7 @@ from distutils.cmd import Command from typing import IO, Any class DistributionMetadata: - def __init__(self, path: FileDescriptorOrPath | None = ...) -> None: ... + def __init__(self, path: FileDescriptorOrPath | None = None) -> None: ... name: str | None version: str | None author: str | None @@ -53,7 +53,7 @@ class DistributionMetadata: class Distribution: cmdclass: dict[str, type[Command]] metadata: DistributionMetadata - def __init__(self, attrs: Mapping[str, Any] | None = ...) -> None: ... + def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ... def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... - def parse_config_files(self, filenames: Iterable[str] | None = ...) -> None: ... + def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... def get_command_obj(self, command: str, create: bool = ...) -> Command | None: ... 
diff --git a/mypy/typeshed/stdlib/distutils/extension.pyi b/mypy/typeshed/stdlib/distutils/extension.pyi index 5639f44a6d03..789bbf6ec3d1 100644 --- a/mypy/typeshed/stdlib/distutils/extension.pyi +++ b/mypy/typeshed/stdlib/distutils/extension.pyi @@ -19,18 +19,18 @@ class Extension: self, name: str, sources: list[str], - include_dirs: list[str] | None = ..., - define_macros: list[tuple[str, str | None]] | None = ..., - undef_macros: list[str] | None = ..., - library_dirs: list[str] | None = ..., - libraries: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - extra_objects: list[str] | None = ..., - extra_compile_args: list[str] | None = ..., - extra_link_args: list[str] | None = ..., - export_symbols: list[str] | None = ..., - swig_opts: list[str] | None = ..., - depends: list[str] | None = ..., - language: str | None = ..., - optional: bool | None = ..., + include_dirs: list[str] | None = None, + define_macros: list[tuple[str, str | None]] | None = None, + undef_macros: list[str] | None = None, + library_dirs: list[str] | None = None, + libraries: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + extra_objects: list[str] | None = None, + extra_compile_args: list[str] | None = None, + extra_link_args: list[str] | None = None, + export_symbols: list[str] | None = None, + swig_opts: list[str] | None = None, + depends: list[str] | None = None, + language: str | None = None, + optional: bool | None = None, ) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi b/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi index 6a7124bd15ad..153583be6b5d 100644 --- a/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi +++ b/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -11,14 +11,14 @@ def fancy_getopt( def wrap_text(text: str, width: int) -> list[str]: ... class FancyGetopt: - def __init__(self, option_table: list[_Option] | None = ...) -> None: ... + def __init__(self, option_table: list[_Option] | None = None) -> None: ... # TODO kinda wrong, `getopt(object=object())` is invalid @overload - def getopt(self, args: list[str] | None = ...) -> _GR: ... + def getopt(self, args: list[str] | None = None) -> _GR: ... @overload def getopt(self, args: list[str] | None, object: Any) -> list[str]: ... def get_option_order(self) -> list[tuple[str, str]]: ... - def generate_help(self, header: str | None = ...) -> list[str]: ... + def generate_help(self, header: str | None = None) -> list[str]: ... class OptionDummy: def __init__(self, options: Iterable[str] = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/file_util.pyi b/mypy/typeshed/stdlib/distutils/file_util.pyi index b3127841bce8..a97dfca60007 100644 --- a/mypy/typeshed/stdlib/distutils/file_util.pyi +++ b/mypy/typeshed/stdlib/distutils/file_util.pyi @@ -6,7 +6,7 @@ def copy_file( preserve_mode: bool = ..., preserve_times: bool = ..., update: bool = ..., - link: str | None = ..., + link: str | None = None, verbose: bool = ..., dry_run: bool = ..., ) -> tuple[str, str]: ... diff --git a/mypy/typeshed/stdlib/distutils/filelist.pyi b/mypy/typeshed/stdlib/distutils/filelist.pyi index 1cfdcf08dca9..bea48ac16ac5 100644 --- a/mypy/typeshed/stdlib/distutils/filelist.pyi +++ b/mypy/typeshed/stdlib/distutils/filelist.pyi @@ -7,9 +7,9 @@ from typing_extensions import Literal class FileList: allfiles: Iterable[str] | None files: list[str] - def __init__(self, warn: None = ..., debug_print: None = ...) -> None: ... 
+ def __init__(self, warn: None = None, debug_print: None = None) -> None: ... def set_allfiles(self, allfiles: Iterable[str]) -> None: ... - def findall(self, dir: str = ...) -> None: ... + def findall(self, dir: str = ".") -> None: ... def debug_print(self, msg: str) -> None: ... def append(self, item: str) -> None: ... def extend(self, items: Iterable[str]) -> None: ... @@ -18,34 +18,34 @@ class FileList: def process_template_line(self, line: str) -> None: ... @overload def include_pattern( - self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 ) -> bool: ... @overload - def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... + def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload def include_pattern( - self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 ) -> bool: ... @overload def exclude_pattern( - self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 ) -> bool: ... @overload - def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... + def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload def exclude_pattern( - self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 ) -> bool: ... -def findall(dir: str = ...) -> list[str]: ... +def findall(dir: str = ".") -> list[str]: ... def glob_to_re(pattern: str) -> str: ... @overload def translate_pattern( - pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[False, 0] = ... + pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 ) -> Pattern[str]: ... @overload -def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> Pattern[str]: ... +def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... @overload def translate_pattern( - pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 ) -> Pattern[str]: ... diff --git a/mypy/typeshed/stdlib/distutils/log.pyi b/mypy/typeshed/stdlib/distutils/log.pyi index 549b569e7356..14ed8d8aefa8 100644 --- a/mypy/typeshed/stdlib/distutils/log.pyi +++ b/mypy/typeshed/stdlib/distutils/log.pyi @@ -7,7 +7,7 @@ ERROR: int FATAL: int class Log: - def __init__(self, threshold: int = ...) -> None: ... + def __init__(self, threshold: int = 3) -> None: ... def log(self, level: int, msg: str, *args: Any) -> None: ... def debug(self, msg: str, *args: Any) -> None: ... def info(self, msg: str, *args: Any) -> None: ... 
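The threshold=3 default corresponds to WARN in distutils.log, so debug() and info() output is suppressed until the threshold is lowered. A brief sketch of that behaviour:

from distutils import log

log.info("hidden at the default WARN threshold")
log.set_verbosity(1)          # verbosity 1 maps to INFO
log.info("now printed")
log.warn("warnings are printed at the default threshold")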
diff --git a/mypy/typeshed/stdlib/distutils/spawn.pyi b/mypy/typeshed/stdlib/distutils/spawn.pyi index dda05ad7e85a..a8a2c4140b2d 100644 --- a/mypy/typeshed/stdlib/distutils/spawn.pyi +++ b/mypy/typeshed/stdlib/distutils/spawn.pyi @@ -1,2 +1,2 @@ def spawn(cmd: list[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ... -def find_executable(executable: str, path: str | None = ...) -> str | None: ... +def find_executable(executable: str, path: str | None = None) -> str | None: ... diff --git a/mypy/typeshed/stdlib/distutils/sysconfig.pyi b/mypy/typeshed/stdlib/distutils/sysconfig.pyi index bf7db9c8f06b..8b291e8b94a5 100644 --- a/mypy/typeshed/stdlib/distutils/sysconfig.pyi +++ b/mypy/typeshed/stdlib/distutils/sysconfig.pyi @@ -8,6 +8,6 @@ def get_config_var(name: str) -> int | str | None: ... def get_config_vars(*args: str) -> Mapping[str, int | str]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... -def get_python_inc(plat_specific: bool = ..., prefix: str | None = ...) -> str: ... -def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: str | None = ...) -> str: ... +def get_python_inc(plat_specific: bool = ..., prefix: str | None = None) -> str: ... +def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: str | None = None) -> str: ... def customize_compiler(compiler: CCompiler) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/text_file.pyi b/mypy/typeshed/stdlib/distutils/text_file.pyi index ace642e027cf..4a6cf1db77c6 100644 --- a/mypy/typeshed/stdlib/distutils/text_file.pyi +++ b/mypy/typeshed/stdlib/distutils/text_file.pyi @@ -3,8 +3,8 @@ from typing import IO class TextFile: def __init__( self, - filename: str | None = ..., - file: IO[str] | None = ..., + filename: str | None = None, + file: IO[str] | None = None, *, strip_comments: bool = ..., lstrip_ws: bool = ..., @@ -15,7 +15,7 @@ class TextFile: ) -> None: ... def open(self, filename: str) -> None: ... def close(self) -> None: ... - def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = ...) -> None: ... + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: ... def readline(self) -> str | None: ... def readlines(self) -> list[str]: ... def unreadline(self, line: str) -> str: ... diff --git a/mypy/typeshed/stdlib/distutils/util.pyi b/mypy/typeshed/stdlib/distutils/util.pyi index da8d66063536..f03844307581 100644 --- a/mypy/typeshed/stdlib/distutils/util.pyi +++ b/mypy/typeshed/stdlib/distutils/util.pyi @@ -1,4 +1,4 @@ -from _typeshed import StrPath +from _typeshed import StrPath, Unused from collections.abc import Callable, Container, Iterable, Mapping from typing import Any from typing_extensions import Literal @@ -10,33 +10,33 @@ def check_environ() -> None: ... def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... def split_quoted(s: str) -> list[str]: ... def execute( - func: Callable[..., object], args: tuple[Any, ...], msg: str | None = ..., verbose: bool = ..., dry_run: bool = ... + func: Callable[..., object], args: tuple[Any, ...], msg: str | None = None, verbose: bool = ..., dry_run: bool = ... ) -> None: ... def strtobool(val: str) -> Literal[0, 1]: ... 
def byte_compile( py_files: list[str], - optimize: int = ..., + optimize: int = 0, force: bool = ..., - prefix: str | None = ..., - base_dir: str | None = ..., + prefix: str | None = None, + base_dir: str | None = None, verbose: bool = ..., dry_run: bool = ..., - direct: bool | None = ..., + direct: bool | None = None, ) -> None: ... def rfc822_escape(header: str) -> str: ... def run_2to3( files: Iterable[str], - fixer_names: Iterable[str] | None = ..., - options: Mapping[str, Any] | None = ..., - explicit: Container[str] | None = ..., # unused + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Unused = None, ) -> None: ... def copydir_run_2to3( src: StrPath, dest: StrPath, - template: str | None = ..., - fixer_names: Iterable[str] | None = ..., - options: Mapping[str, Any] | None = ..., - explicit: Container[str] | None = ..., + template: str | None = None, + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Container[str] | None = None, ) -> list[str]: ... class Mixin2to3: diff --git a/mypy/typeshed/stdlib/distutils/version.pyi b/mypy/typeshed/stdlib/distutils/version.pyi index 627d45067b5c..4f1b64a7381d 100644 --- a/mypy/typeshed/stdlib/distutils/version.pyi +++ b/mypy/typeshed/stdlib/distutils/version.pyi @@ -9,7 +9,7 @@ class Version: def __gt__(self: Self, other: Self | str) -> bool: ... def __ge__(self: Self, other: Self | str) -> bool: ... @abstractmethod - def __init__(self, vstring: str | None = ...) -> None: ... + def __init__(self, vstring: str | None = None) -> None: ... @abstractmethod def parse(self: Self, vstring: str) -> Self: ... @abstractmethod @@ -21,7 +21,7 @@ class StrictVersion(Version): version_re: Pattern[str] version: tuple[int, int, int] prerelease: tuple[str, int] | None - def __init__(self, vstring: str | None = ...) -> None: ... + def __init__(self, vstring: str | None = None) -> None: ... def parse(self: Self, vstring: str) -> Self: ... def __str__(self) -> str: ... # noqa: Y029 def _cmp(self: Self, other: Self | str) -> bool: ... @@ -30,7 +30,7 @@ class LooseVersion(Version): component_re: Pattern[str] vstring: str version: tuple[str | int, ...] - def __init__(self, vstring: str | None = ...) -> None: ... + def __init__(self, vstring: str | None = None) -> None: ... def parse(self: Self, vstring: str) -> Self: ... def __str__(self) -> str: ... # noqa: Y029 def _cmp(self: Self, other: Self | str) -> bool: ... diff --git a/mypy/typeshed/stdlib/doctest.pyi b/mypy/typeshed/stdlib/doctest.pyi index 719551eb77de..88d066fdc23c 100644 --- a/mypy/typeshed/stdlib/doctest.pyi +++ b/mypy/typeshed/stdlib/doctest.pyi @@ -80,10 +80,10 @@ class Example: self, source: str, want: str, - exc_msg: str | None = ..., - lineno: int = ..., - indent: int = ..., - options: dict[int, bool] | None = ..., + exc_msg: str | None = None, + lineno: int = 0, + indent: int = 0, + options: dict[int, bool] | None = None, ) -> None: ... def __eq__(self, other: object) -> bool: ... @@ -107,21 +107,21 @@ class DocTest: def __eq__(self, other: object) -> bool: ... class DocTestParser: - def parse(self, string: str, name: str = ...) -> list[str | Example]: ... + def parse(self, string: str, name: str = "") -> list[str | Example]: ... def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: ... - def get_examples(self, string: str, name: str = ...) -> list[Example]: ... 
+ def get_examples(self, string: str, name: str = "") -> list[Example]: ... class DocTestFinder: def __init__( - self, verbose: bool = ..., parser: DocTestParser = ..., recurse: bool = ..., exclude_empty: bool = ... + self, verbose: bool = False, parser: DocTestParser = ..., recurse: bool = True, exclude_empty: bool = True ) -> None: ... def find( self, obj: object, - name: str | None = ..., - module: None | bool | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., - extraglobs: dict[str, Any] | None = ..., + name: str | None = None, + module: None | bool | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, ) -> list[DocTest]: ... _Out: TypeAlias = Callable[[str], object] @@ -133,15 +133,15 @@ class DocTestRunner: tries: int failures: int test: DocTest - def __init__(self, checker: OutputChecker | None = ..., verbose: bool | None = ..., optionflags: int = ...) -> None: ... + def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: ... def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... def run( - self, test: DocTest, compileflags: int | None = ..., out: _Out | None = ..., clear_globs: bool = ... + self, test: DocTest, compileflags: int | None = None, out: _Out | None = None, clear_globs: bool = True ) -> TestResults: ... - def summarize(self, verbose: bool | None = ...) -> TestResults: ... + def summarize(self, verbose: bool | None = None) -> TestResults: ... def merge(self, other: DocTestRunner) -> None: ... class OutputChecker: @@ -165,32 +165,37 @@ class DebugRunner(DocTestRunner): ... master: DocTestRunner | None def testmod( - m: types.ModuleType | None = ..., - name: str | None = ..., - globs: dict[str, Any] | None = ..., - verbose: bool | None = ..., - report: bool = ..., - optionflags: int = ..., - extraglobs: dict[str, Any] | None = ..., - raise_on_error: bool = ..., - exclude_empty: bool = ..., + m: types.ModuleType | None = None, + name: str | None = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, + exclude_empty: bool = False, ) -> TestResults: ... def testfile( filename: str, - module_relative: bool = ..., - name: str | None = ..., - package: None | str | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., - verbose: bool | None = ..., - report: bool = ..., - optionflags: int = ..., - extraglobs: dict[str, Any] | None = ..., - raise_on_error: bool = ..., + module_relative: bool = True, + name: str | None = None, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, parser: DocTestParser = ..., - encoding: str | None = ..., + encoding: str | None = None, ) -> TestResults: ... def run_docstring_examples( - f: object, globs: dict[str, Any], verbose: bool = ..., name: str = ..., compileflags: int | None = ..., optionflags: int = ... 
+ f: object, + globs: dict[str, Any], + verbose: bool = False, + name: str = "NoName", + compileflags: int | None = None, + optionflags: int = 0, ) -> None: ... def set_unittest_reportflags(flags: int) -> int: ... @@ -198,10 +203,10 @@ class DocTestCase(unittest.TestCase): def __init__( self, test: DocTest, - optionflags: int = ..., - setUp: Callable[[DocTest], Any] | None = ..., - tearDown: Callable[[DocTest], Any] | None = ..., - checker: OutputChecker | None = ..., + optionflags: int = 0, + setUp: Callable[[DocTest], Any] | None = None, + tearDown: Callable[[DocTest], Any] | None = None, + checker: OutputChecker | None = None, ) -> None: ... def runTest(self) -> None: ... def format_failure(self, err: str) -> str: ... @@ -214,10 +219,10 @@ class SkipDocTestCase(DocTestCase): class _DocTestSuite(unittest.TestSuite): ... def DocTestSuite( - module: None | str | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., - extraglobs: dict[str, Any] | None = ..., - test_finder: DocTestFinder | None = ..., + module: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, + test_finder: DocTestFinder | None = None, **options: Any, ) -> _DocTestSuite: ... @@ -225,16 +230,16 @@ class DocFileCase(DocTestCase): ... def DocFileTest( path: str, - module_relative: bool = ..., - package: None | str | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., + module_relative: bool = True, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, parser: DocTestParser = ..., - encoding: str | None = ..., + encoding: str | None = None, **options: Any, ) -> DocFileCase: ... def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... def script_from_examples(s: str) -> str: ... def testsource(module: None | str | types.ModuleType, name: str) -> str: ... -def debug_src(src: str, pm: bool = ..., globs: dict[str, Any] | None = ...) -> None: ... -def debug_script(src: str, pm: bool = ..., globs: dict[str, Any] | None = ...) -> None: ... -def debug(module: None | str | types.ModuleType, name: str, pm: bool = ...) -> None: ... +def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: ... diff --git a/mypy/typeshed/stdlib/email/_header_value_parser.pyi b/mypy/typeshed/stdlib/email/_header_value_parser.pyi index 28a851d2f4e7..0e422294e77a 100644 --- a/mypy/typeshed/stdlib/email/_header_value_parser.pyi +++ b/mypy/typeshed/stdlib/email/_header_value_parser.pyi @@ -39,8 +39,8 @@ class TokenList(list[TokenList | Terminal]): @property def comments(self) -> list[str]: ... def fold(self, *, policy: Policy) -> str: ... - def pprint(self, indent: str = ...) -> None: ... - def ppstr(self, indent: str = ...) -> str: ... + def pprint(self, indent: str = "") -> None: ... + def ppstr(self, indent: str = "") -> str: ... class WhiteSpaceTokenList(TokenList): ... diff --git a/mypy/typeshed/stdlib/email/base64mime.pyi b/mypy/typeshed/stdlib/email/base64mime.pyi index 16118a879ad7..563cd7f669a2 100644 --- a/mypy/typeshed/stdlib/email/base64mime.pyi +++ b/mypy/typeshed/stdlib/email/base64mime.pyi @@ -3,10 +3,10 @@ __all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encod from _typeshed import ReadableBuffer def header_length(bytearray: str | bytes | bytearray) -> int: ... 
-def header_encode(header_bytes: str | ReadableBuffer, charset: str = ...) -> str: ... +def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: ... # First argument should be a buffer that supports slicing and len(). -def body_encode(s: bytes | bytearray, maxlinelen: int = ..., eol: str = ...) -> str: ... +def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: ... def decode(string: str | ReadableBuffer) -> bytes: ... body_decode = decode diff --git a/mypy/typeshed/stdlib/email/charset.pyi b/mypy/typeshed/stdlib/email/charset.pyi index 236908537f83..24b8fd768b7b 100644 --- a/mypy/typeshed/stdlib/email/charset.pyi +++ b/mypy/typeshed/stdlib/email/charset.pyi @@ -13,7 +13,7 @@ class Charset: output_charset: str | None input_codec: str | None output_codec: str | None - def __init__(self, input_charset: str = ...) -> None: ... + def __init__(self, input_charset: str = "us-ascii") -> None: ... def get_body_encoding(self) -> str: ... def get_output_charset(self) -> str | None: ... def header_encode(self, string: str) -> str: ... @@ -23,7 +23,7 @@ class Charset: def __ne__(self, __other: object) -> bool: ... def add_charset( - charset: str, header_enc: int | None = ..., body_enc: int | None = ..., output_charset: str | None = ... + charset: str, header_enc: int | None = None, body_enc: int | None = None, output_charset: str | None = None ) -> None: ... def add_alias(alias: str, canonical: str) -> None: ... def add_codec(charset: str, codecname: str) -> None: ... diff --git a/mypy/typeshed/stdlib/email/errors.pyi b/mypy/typeshed/stdlib/email/errors.pyi index 656cbd374ac4..c54f1560c9ae 100644 --- a/mypy/typeshed/stdlib/email/errors.pyi +++ b/mypy/typeshed/stdlib/email/errors.pyi @@ -8,7 +8,7 @@ class MultipartConversionError(MessageError, TypeError): ... class CharsetError(MessageError): ... class MessageDefect(ValueError): - def __init__(self, line: str | None = ...) -> None: ... + def __init__(self, line: str | None = None) -> None: ... class NoBoundaryInMultipartDefect(MessageDefect): ... class StartBoundaryNotFoundDefect(MessageDefect): ... diff --git a/mypy/typeshed/stdlib/email/feedparser.pyi b/mypy/typeshed/stdlib/email/feedparser.pyi index 809f0b0e112b..4b7f73b9c015 100644 --- a/mypy/typeshed/stdlib/email/feedparser.pyi +++ b/mypy/typeshed/stdlib/email/feedparser.pyi @@ -9,7 +9,7 @@ _MessageT = TypeVar("_MessageT", bound=Message) class FeedParser(Generic[_MessageT]): @overload - def __init__(self: FeedParser[Message], _factory: None = ..., *, policy: Policy = ...) -> None: ... + def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... def feed(self, data: str) -> None: ... @@ -17,7 +17,7 @@ class FeedParser(Generic[_MessageT]): class BytesFeedParser(Generic[_MessageT]): @overload - def __init__(self: BytesFeedParser[Message], _factory: None = ..., *, policy: Policy = ...) -> None: ... + def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... def feed(self, data: bytes | bytearray) -> None: ... 
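The _factory=None default means FeedParser builds ordinary email.message.Message objects (subject to the active policy). A minimal usage sketch:

from email.feedparser import FeedParser

parser = FeedParser()                     # _factory=None -> plain Message objects
parser.feed("Subject: hello\r\n")
parser.feed("\r\nbody text\r\n")
msg = parser.close()
print(msg["Subject"])                     # -> hello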
diff --git a/mypy/typeshed/stdlib/email/generator.pyi b/mypy/typeshed/stdlib/email/generator.pyi index 5a6b6374dd4b..8362dd9c4ff6 100644 --- a/mypy/typeshed/stdlib/email/generator.pyi +++ b/mypy/typeshed/stdlib/email/generator.pyi @@ -10,12 +10,12 @@ class Generator: def __init__( self, outfp: SupportsWrite[str], - mangle_from_: bool | None = ..., - maxheaderlen: int | None = ..., + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, ) -> None: ... - def flatten(self, msg: Message, unixfrom: bool = ..., linesep: str | None = ...) -> None: ... + def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... class BytesGenerator: def clone(self, fp: SupportsWrite[bytes]) -> BytesGenerator: ... @@ -23,20 +23,20 @@ class BytesGenerator: def __init__( self, outfp: SupportsWrite[bytes], - mangle_from_: bool | None = ..., - maxheaderlen: int | None = ..., + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, ) -> None: ... - def flatten(self, msg: Message, unixfrom: bool = ..., linesep: str | None = ...) -> None: ... + def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... class DecodedGenerator(Generator): def __init__( self, outfp: SupportsWrite[str], - mangle_from_: bool | None = ..., - maxheaderlen: int | None = ..., - fmt: str | None = ..., + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + fmt: str | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/header.pyi b/mypy/typeshed/stdlib/email/header.pyi index 58740bd1bdae..c6f0c6fbf6fc 100644 --- a/mypy/typeshed/stdlib/email/header.pyi +++ b/mypy/typeshed/stdlib/email/header.pyi @@ -7,15 +7,15 @@ __all__ = ["Header", "decode_header", "make_header"] class Header: def __init__( self, - s: bytes | bytearray | str | None = ..., - charset: Charset | str | None = ..., - maxlinelen: int | None = ..., - header_name: str | None = ..., - continuation_ws: str = ..., - errors: str = ..., + s: bytes | bytearray | str | None = None, + charset: Charset | str | None = None, + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", + errors: str = "strict", ) -> None: ... - def append(self, s: bytes | bytearray | str, charset: Charset | str | None = ..., errors: str = ...) -> None: ... - def encode(self, splitchars: str = ..., maxlinelen: int | None = ..., linesep: str = ...) -> str: ... + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: ... + def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... @@ -25,7 +25,7 @@ class Header: def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... def make_header( decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], - maxlinelen: int | None = ..., - header_name: str | None = ..., - continuation_ws: str = ..., + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", ) -> Header: ... 
diff --git a/mypy/typeshed/stdlib/email/headerregistry.pyi b/mypy/typeshed/stdlib/email/headerregistry.pyi index b2b63c4ac72c..df07e2458e81 100644 --- a/mypy/typeshed/stdlib/email/headerregistry.pyi +++ b/mypy/typeshed/stdlib/email/headerregistry.pyi @@ -153,7 +153,7 @@ class HeaderRegistry: base_class: type[BaseHeader] default_class: type[_HeaderParser] def __init__( - self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = ... + self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = True ) -> None: ... def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: ... def __getitem__(self, name: str) -> type[BaseHeader]: ... @@ -169,7 +169,7 @@ class Address: @property def addr_spec(self) -> str: ... def __init__( - self, display_name: str = ..., username: str | None = ..., domain: str | None = ..., addr_spec: str | None = ... + self, display_name: str = "", username: str | None = "", domain: str | None = "", addr_spec: str | None = None ) -> None: ... def __eq__(self, other: object) -> bool: ... @@ -178,5 +178,5 @@ class Group: def display_name(self) -> str | None: ... @property def addresses(self) -> tuple[Address, ...]: ... - def __init__(self, display_name: str | None = ..., addresses: Iterable[Address] | None = ...) -> None: ... + def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: ... def __eq__(self, other: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/email/iterators.pyi b/mypy/typeshed/stdlib/email/iterators.pyi index 29068819ac15..d964d6843833 100644 --- a/mypy/typeshed/stdlib/email/iterators.pyi +++ b/mypy/typeshed/stdlib/email/iterators.pyi @@ -4,9 +4,9 @@ from email.message import Message __all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] -def body_line_iterator(msg: Message, decode: bool = ...) -> Iterator[str]: ... -def typed_subpart_iterator(msg: Message, maintype: str = ..., subtype: str | None = ...) -> Iterator[str]: ... +def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: ... +def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: ... def walk(self: Message) -> Iterator[Message]: ... # We include the seemingly private function because it is documented in the stdlib documentation. -def _structure(msg: Message, fp: SupportsWrite[str] | None = ..., level: int = ..., include_default: bool = ...) -> None: ... +def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: ... diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index 58b1c1cd8f3d..2777450a77ba 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -25,8 +25,8 @@ class Message: def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> str | None: ... def attach(self, payload: Message) -> None: ... - def get_payload(self, i: int | None = ..., decode: bool = ...) -> Any: ... # returns _PayloadType | None - def set_payload(self, payload: _PayloadType, charset: _CharsetType = ...) -> None: ... + def get_payload(self, i: int | None = None, decode: bool = False) -> Any: ... # returns _PayloadType | None + def set_payload(self, payload: _PayloadType, charset: _CharsetType = None) -> None: ... def set_charset(self, charset: _CharsetType) -> None: ... def get_charset(self) -> _CharsetType: ... 
def __len__(self) -> int: ... @@ -38,8 +38,14 @@ class Message: def keys(self) -> list[str]: ... def values(self) -> list[_HeaderType]: ... def items(self) -> list[tuple[str, _HeaderType]]: ... - def get(self, name: str, failobj: _T = ...) -> _HeaderType | _T: ... - def get_all(self, name: str, failobj: _T = ...) -> list[_HeaderType] | _T: ... + @overload + def get(self, name: str, failobj: None = None) -> _HeaderType | None: ... + @overload + def get(self, name: str, failobj: _T) -> _HeaderType | _T: ... + @overload + def get_all(self, name: str, failobj: None = None) -> list[_HeaderType] | None: ... + @overload + def get_all(self, name: str, failobj: _T) -> list[_HeaderType] | _T: ... def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... def replace_header(self, _name: str, _value: _HeaderType) -> None: ... def get_content_type(self) -> str: ... @@ -47,32 +53,51 @@ class Message: def get_content_subtype(self) -> str: ... def get_default_type(self) -> str: ... def set_default_type(self, ctype: str) -> None: ... - def get_params(self, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> list[tuple[str, str]] | _T: ... - def get_param(self, param: str, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> _T | _ParamType: ... - def del_param(self, param: str, header: str = ..., requote: bool = ...) -> None: ... - def set_type(self, type: str, header: str = ..., requote: bool = ...) -> None: ... - def get_filename(self, failobj: _T = ...) -> _T | str: ... - def get_boundary(self, failobj: _T = ...) -> _T | str: ... + @overload + def get_params( + self, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> list[tuple[str, str]] | None: ... + @overload + def get_params(self, failobj: _T, header: str = "content-type", unquote: bool = True) -> list[tuple[str, str]] | _T: ... + @overload + def get_param( + self, param: str, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> _ParamType | None: ... + @overload + def get_param(self, param: str, failobj: _T, header: str = "content-type", unquote: bool = True) -> _ParamType | _T: ... + def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: ... + def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: ... + @overload + def get_filename(self, failobj: None = None) -> str | None: ... + @overload + def get_filename(self, failobj: _T) -> str | _T: ... + @overload + def get_boundary(self, failobj: None = None) -> str | None: ... + @overload + def get_boundary(self, failobj: _T) -> str | _T: ... def set_boundary(self, boundary: str) -> None: ... @overload def get_content_charset(self) -> str | None: ... @overload def get_content_charset(self, failobj: _T) -> str | _T: ... - def get_charsets(self, failobj: _T = ...) -> _T | list[str]: ... + @overload + def get_charsets(self, failobj: None = None) -> list[str] | None: ... + @overload + def get_charsets(self, failobj: _T) -> list[str] | _T: ... def walk(self: Self) -> Generator[Self, None, None]: ... def get_content_disposition(self) -> str | None: ... - def as_string(self, unixfrom: bool = ..., maxheaderlen: int = ..., policy: Policy | None = ...) -> str: ... - def as_bytes(self, unixfrom: bool = ..., policy: Policy | None = ...) -> bytes: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy | None = None) -> str: ... + def as_bytes(self, unixfrom: bool = False, policy: Policy | None = None) -> bytes: ... 
def __bytes__(self) -> bytes: ... def set_param( self, param: str, value: str, - header: str = ..., - requote: bool = ..., - charset: str | None = ..., - language: str = ..., - replace: bool = ..., + header: str = "Content-Type", + requote: bool = True, + charset: str | None = None, + language: str = "", + replace: bool = False, ) -> None: ... def __init__(self, policy: Policy = ...) -> None: ... # The following two methods are undocumented, but a source code comment states that they are public API @@ -80,21 +105,21 @@ class Message: def raw_items(self) -> Iterator[tuple[str, _HeaderType]]: ... class MIMEPart(Message): - def __init__(self, policy: Policy | None = ...) -> None: ... + def __init__(self, policy: Policy | None = None) -> None: ... def get_body(self, preferencelist: Sequence[str] = ...) -> Message | None: ... def iter_attachments(self) -> Iterator[Message]: ... def iter_parts(self) -> Iterator[Message]: ... - def get_content(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> Any: ... - def set_content(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... - def make_related(self, boundary: str | None = ...) -> None: ... - def make_alternative(self, boundary: str | None = ...) -> None: ... - def make_mixed(self, boundary: str | None = ...) -> None: ... + def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... + def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... + def make_related(self, boundary: str | None = None) -> None: ... + def make_alternative(self, boundary: str | None = None) -> None: ... + def make_mixed(self, boundary: str | None = None) -> None: ... def add_related(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def add_alternative(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def add_attachment(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def clear(self) -> None: ... def clear_content(self) -> None: ... - def as_string(self, unixfrom: bool = ..., maxheaderlen: int | None = ..., policy: Policy | None = ...) -> str: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy | None = None) -> str: ... def is_attachment(self) -> bool: ... class EmailMessage(MIMEPart): ... diff --git a/mypy/typeshed/stdlib/email/mime/application.pyi b/mypy/typeshed/stdlib/email/mime/application.pyi index 5ff60bff6ad2..a7ab9dc75ce2 100644 --- a/mypy/typeshed/stdlib/email/mime/application.pyi +++ b/mypy/typeshed/stdlib/email/mime/application.pyi @@ -9,9 +9,9 @@ class MIMEApplication(MIMENonMultipart): def __init__( self, _data: str | bytes | bytearray, - _subtype: str = ..., + _subtype: str = "octet-stream", _encoder: Callable[[MIMEApplication], object] = ..., *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... 
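The _subtype="octet-stream" default matches MIMEApplication at runtime, so a bare call yields application/octet-stream with base64 transfer encoding. For example:

from email.mime.application import MIMEApplication

part = MIMEApplication(b"\x00\x01 binary payload")   # _subtype defaults to "octet-stream"
print(part.get_content_type())                       # application/octet-stream
print(part["Content-Transfer-Encoding"])             # base64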
diff --git a/mypy/typeshed/stdlib/email/mime/audio.pyi b/mypy/typeshed/stdlib/email/mime/audio.pyi index 05e173f5c4a1..090dfb960db6 100644 --- a/mypy/typeshed/stdlib/email/mime/audio.pyi +++ b/mypy/typeshed/stdlib/email/mime/audio.pyi @@ -9,9 +9,9 @@ class MIMEAudio(MIMENonMultipart): def __init__( self, _audiodata: str | bytes | bytearray, - _subtype: str | None = ..., + _subtype: str | None = None, _encoder: Callable[[MIMEAudio], object] = ..., *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/base.pyi b/mypy/typeshed/stdlib/email/mime/base.pyi index c8f2fe6db79d..b733709f1b5a 100644 --- a/mypy/typeshed/stdlib/email/mime/base.pyi +++ b/mypy/typeshed/stdlib/email/mime/base.pyi @@ -5,4 +5,4 @@ from email.policy import Policy __all__ = ["MIMEBase"] class MIMEBase(email.message.Message): - def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = ..., **_params: _ParamsType) -> None: ... + def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/image.pyi b/mypy/typeshed/stdlib/email/mime/image.pyi index 7e46b835b541..b47afa6ce592 100644 --- a/mypy/typeshed/stdlib/email/mime/image.pyi +++ b/mypy/typeshed/stdlib/email/mime/image.pyi @@ -9,9 +9,9 @@ class MIMEImage(MIMENonMultipart): def __init__( self, _imagedata: str | bytes | bytearray, - _subtype: str | None = ..., + _subtype: str | None = None, _encoder: Callable[[MIMEImage], object] = ..., *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/message.pyi b/mypy/typeshed/stdlib/email/mime/message.pyi index 9e7cd04b6e77..23cf58619ad9 100644 --- a/mypy/typeshed/stdlib/email/mime/message.pyi +++ b/mypy/typeshed/stdlib/email/mime/message.pyi @@ -5,4 +5,4 @@ from email.policy import Policy __all__ = ["MIMEMessage"] class MIMEMessage(MIMENonMultipart): - def __init__(self, _msg: Message, _subtype: str = ..., *, policy: Policy | None = ...) -> None: ... + def __init__(self, _msg: Message, _subtype: str = "rfc822", *, policy: Policy | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/multipart.pyi b/mypy/typeshed/stdlib/email/mime/multipart.pyi index 6cd480ccf0a4..6163810ed94a 100644 --- a/mypy/typeshed/stdlib/email/mime/multipart.pyi +++ b/mypy/typeshed/stdlib/email/mime/multipart.pyi @@ -9,10 +9,10 @@ __all__ = ["MIMEMultipart"] class MIMEMultipart(MIMEBase): def __init__( self, - _subtype: str = ..., - boundary: str | None = ..., - _subparts: Sequence[Message] | None = ..., + _subtype: str = "mixed", + boundary: str | None = None, + _subparts: Sequence[Message] | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/text.pyi b/mypy/typeshed/stdlib/email/mime/text.pyi index 9672c3b717b2..74d5ef4c5cae 100644 --- a/mypy/typeshed/stdlib/email/mime/text.pyi +++ b/mypy/typeshed/stdlib/email/mime/text.pyi @@ -4,4 +4,6 @@ from email.policy import Policy __all__ = ["MIMEText"] class MIMEText(MIMENonMultipart): - def __init__(self, _text: str, _subtype: str = ..., _charset: str | None = ..., *, policy: Policy | None = ...) -> None: ... + def __init__( + self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None + ) -> None: ... 
diff --git a/mypy/typeshed/stdlib/email/parser.pyi b/mypy/typeshed/stdlib/email/parser.pyi index 1afd8940f4ef..ba5dace28916 100644 --- a/mypy/typeshed/stdlib/email/parser.pyi +++ b/mypy/typeshed/stdlib/email/parser.pyi @@ -7,15 +7,19 @@ from typing import BinaryIO, TextIO __all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedParser", "BytesFeedParser"] class Parser: - def __init__(self, _class: Callable[[], Message] | None = ..., *, policy: Policy = ...) -> None: ... - def parse(self, fp: TextIO, headersonly: bool = ...) -> Message: ... - def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... + def __init__(self, _class: Callable[[], Message] | None = None, *, policy: Policy = ...) -> None: ... + def parse(self, fp: TextIO, headersonly: bool = False) -> Message: ... + def parsestr(self, text: str, headersonly: bool = False) -> Message: ... -class HeaderParser(Parser): ... +class HeaderParser(Parser): + def parse(self, fp: TextIO, headersonly: bool = True) -> Message: ... + def parsestr(self, text: str, headersonly: bool = True) -> Message: ... class BytesParser: def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... - def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... - def parsebytes(self, text: bytes | bytearray, headersonly: bool = ...) -> Message: ... + def parse(self, fp: BinaryIO, headersonly: bool = False) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> Message: ... -class BytesHeaderParser(BytesParser): ... +class BytesHeaderParser(BytesParser): + def parse(self, fp: BinaryIO, headersonly: bool = True) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = True) -> Message: ... diff --git a/mypy/typeshed/stdlib/email/quoprimime.pyi b/mypy/typeshed/stdlib/email/quoprimime.pyi index ec0c799583bf..87d08eecc70c 100644 --- a/mypy/typeshed/stdlib/email/quoprimime.pyi +++ b/mypy/typeshed/stdlib/email/quoprimime.pyi @@ -19,9 +19,9 @@ def header_length(bytearray: Iterable[int]) -> int: ... def body_length(bytearray: Iterable[int]) -> int: ... def unquote(s: str | bytes | bytearray) -> str: ... def quote(c: str | bytes | bytearray) -> str: ... -def header_encode(header_bytes: bytes | bytearray, charset: str = ...) -> str: ... -def body_encode(body: str, maxlinelen: int = ..., eol: str = ...) -> str: ... -def decode(encoded: str, eol: str = ...) -> str: ... +def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: ... +def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(encoded: str, eol: str = "\n") -> str: ... def header_decode(s: str) -> str: ... body_decode = decode diff --git a/mypy/typeshed/stdlib/email/utils.pyi b/mypy/typeshed/stdlib/email/utils.pyi index 480c5f79549d..090ddf9e31bc 100644 --- a/mypy/typeshed/stdlib/email/utils.pyi +++ b/mypy/typeshed/stdlib/email/utils.pyi @@ -28,7 +28,7 @@ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None def quote(str: str) -> str: ... def unquote(str: str) -> str: ... def parseaddr(addr: str | None) -> tuple[str, str]: ... -def formataddr(pair: tuple[str | None, str], charset: str | Charset = ...) -> str: ... +def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ... def getaddresses(fieldvalues: list[str]) -> list[tuple[str, str]]: ... @overload def parsedate(data: None) -> None: ... 
@@ -49,11 +49,11 @@ else: def parsedate_to_datetime(data: str) -> datetime.datetime: ... def mktime_tz(data: _PDTZ) -> int: ... -def formatdate(timeval: float | None = ..., localtime: bool = ..., usegmt: bool = ...) -> str: ... -def format_datetime(dt: datetime.datetime, usegmt: bool = ...) -> str: ... -def localtime(dt: datetime.datetime | None = ..., isdst: int = ...) -> datetime.datetime: ... -def make_msgid(idstring: str | None = ..., domain: str | None = ...) -> str: ... +def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ... +def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ... +def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... +def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ... def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... -def encode_rfc2231(s: str, charset: str | None = ..., language: str | None = ...) -> str: ... -def collapse_rfc2231_value(value: _ParamType, errors: str = ..., fallback_charset: str = ...) -> str: ... +def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ... +def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8.pyi b/mypy/typeshed/stdlib/encodings/utf_8.pyi index 8e73756199c1..0de51026f9f5 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8.pyi @@ -2,20 +2,20 @@ import codecs from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): - def encode(self, input: str, final: bool = ...) -> bytes: ... + def encode(self, input: str, final: bool = False) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): @staticmethod - def _buffer_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def _buffer_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): @staticmethod - def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... class StreamReader(codecs.StreamReader): @staticmethod - def decode(__data: ReadableBuffer, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... -def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... +def encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi index 27171063f53f..150fe22f8f6e 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -2,21 +2,21 @@ import codecs from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): - def __init__(self, errors: str = ...) -> None: ... - def encode(self, input: str, final: bool = ...) 
-> bytes: ... + def __init__(self, errors: str = "strict") -> None: ... + def encode(self, input: str, final: bool = False) -> bytes: ... def getstate(self) -> int: ... # type: ignore[override] def setstate(self, state: int) -> None: ... # type: ignore[override] class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... def _buffer_decode(self, input: ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): - def encode(self, input: str, errors: str | None = ...) -> tuple[bytes, int]: ... + def encode(self, input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... class StreamReader(codecs.StreamReader): - def decode(self, input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... -def encode(input: str, errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... +def encode(input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/ensurepip/__init__.pyi b/mypy/typeshed/stdlib/ensurepip/__init__.pyi index e2686b8d5437..332fb1845917 100644 --- a/mypy/typeshed/stdlib/ensurepip/__init__.pyi +++ b/mypy/typeshed/stdlib/ensurepip/__init__.pyi @@ -3,10 +3,10 @@ __all__ = ["version", "bootstrap"] def version() -> str: ... def bootstrap( *, - root: str | None = ..., - upgrade: bool = ..., - user: bool = ..., - altinstall: bool = ..., - default_pip: bool = ..., - verbosity: int = ..., + root: str | None = None, + upgrade: bool = False, + user: bool = False, + altinstall: bool = False, + default_pip: bool = False, + verbosity: int = 0, ) -> None: ... diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index a14744f1ba8d..182076731ab2 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -1,6 +1,6 @@ import sys import types -from _typeshed import Self, SupportsKeysAndGetItem +from _typeshed import Self, SupportsKeysAndGetItem, Unused from abc import ABCMeta from builtins import property as _builtins_property from collections.abc import Iterable, Iterator, Mapping @@ -85,8 +85,8 @@ class EnumMeta(ABCMeta): bases: tuple[type, ...], classdict: _EnumDict, *, - boundary: FlagBoundary | None = ..., - _simple: bool = ..., + boundary: FlagBoundary | None = None, + _simple: bool = False, **kwds: Any, ) -> Self: ... elif sys.version_info >= (3, 9): @@ -112,7 +112,7 @@ class EnumMeta(ABCMeta): def __dir__(self) -> list[str]: ... # Simple value lookup @overload # type: ignore[override] - def __call__(cls: type[_EnumMemberT], value: Any, names: None = ...) -> _EnumMemberT: ... + def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... # Functional Enum API if sys.version_info >= (3, 11): @overload @@ -121,11 +121,11 @@ class EnumMeta(ABCMeta): value: str, names: _EnumNames, *, - module: str | None = ..., - qualname: str | None = ..., - type: type | None = ..., - start: int = ..., - boundary: FlagBoundary | None = ..., + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + boundary: FlagBoundary | None = None, ) -> type[Enum]: ... 
else: @overload @@ -134,10 +134,10 @@ class EnumMeta(ABCMeta): value: str, names: _EnumNames, *, - module: str | None = ..., - qualname: str | None = ..., - type: type | None = ..., - start: int = ..., + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, ) -> type[Enum]: ... _member_names_: list[str] # undocumented _member_map_: dict[str, Enum] # undocumented @@ -177,7 +177,7 @@ class Enum(metaclass=EnumMeta): def __new__(cls: type[Self], value: object) -> Self: ... def __dir__(self) -> list[str]: ... def __format__(self, format_spec: str) -> str: ... - def __reduce_ex__(self, proto: object) -> tuple[Any, ...]: ... + def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... if sys.version_info >= (3, 11): class ReprEnum(Enum): ... @@ -275,6 +275,6 @@ if sys.version_info >= (3, 11): KEEP = FlagBoundary.KEEP def global_str(self: Enum) -> str: ... - def global_enum(cls: _EnumerationT, update_str: bool = ...) -> _EnumerationT: ... + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... def global_enum_repr(self: Enum) -> str: ... def global_flag_repr(self: Flag) -> str: ... diff --git a/mypy/typeshed/stdlib/fcntl.pyi b/mypy/typeshed/stdlib/fcntl.pyi index 2df16083c0b7..90676e365712 100644 --- a/mypy/typeshed/stdlib/fcntl.pyi +++ b/mypy/typeshed/stdlib/fcntl.pyi @@ -101,16 +101,16 @@ if sys.platform != "win32": I_SWROPT: int I_UNLINK: int @overload - def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = ...) -> int: ... + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = 0) -> int: ... @overload def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: str | ReadOnlyBuffer) -> bytes: ... @overload - def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = ..., __mutate_flag: bool = ...) -> int: ... + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = 0, __mutate_flag: bool = True) -> int: ... @overload - def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[True] = ...) -> int: ... + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[True] = True) -> int: ... @overload def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[False]) -> bytes: ... @overload - def ioctl(__fd: FileDescriptorLike, __request: int, __arg: ReadOnlyBuffer, __mutate_flag: bool = ...) -> bytes: ... + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: ReadOnlyBuffer, __mutate_flag: bool = True) -> bytes: ... def flock(__fd: FileDescriptorLike, __operation: int) -> None: ... - def lockf(__fd: FileDescriptorLike, __cmd: int, __len: int = ..., __start: int = ..., __whence: int = ...) -> Any: ... + def lockf(__fd: FileDescriptorLike, __cmd: int, __len: int = 0, __start: int = 0, __whence: int = 0) -> Any: ... diff --git a/mypy/typeshed/stdlib/filecmp.pyi b/mypy/typeshed/stdlib/filecmp.pyi index dd4a0628b026..008d7a44e6c4 100644 --- a/mypy/typeshed/stdlib/filecmp.pyi +++ b/mypy/typeshed/stdlib/filecmp.pyi @@ -12,9 +12,9 @@ __all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] DEFAULT_IGNORES: list[str] BUFSIZE: Literal[8192] -def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = ...) -> bool: ... +def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: ... 
def cmpfiles( - a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = ... + a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = True ) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... class dircmp(Generic[AnyStr]): @@ -22,8 +22,8 @@ class dircmp(Generic[AnyStr]): self, a: GenericPath[AnyStr], b: GenericPath[AnyStr], - ignore: Sequence[AnyStr] | None = ..., - hide: Sequence[AnyStr] | None = ..., + ignore: Sequence[AnyStr] | None = None, + hide: Sequence[AnyStr] | None = None, ) -> None: ... left: AnyStr right: AnyStr diff --git a/mypy/typeshed/stdlib/fileinput.pyi b/mypy/typeshed/stdlib/fileinput.pyi index e0babbcd40cc..17379e92ba5f 100644 --- a/mypy/typeshed/stdlib/fileinput.pyi +++ b/mypy/typeshed/stdlib/fileinput.pyi @@ -36,89 +36,89 @@ if sys.version_info >= (3, 10): # encoding and errors are added @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> FileInput[str]: ... @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., - encoding: None = ..., - errors: None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, ) -> FileInput[bytes]: ... @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> FileInput[Any]: ... elif sys.version_info >= (3, 8): # bufsize is dropped and mode and openhook become keyword-only @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> FileInput[str]: ... 
@overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> FileInput[bytes]: ... @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... else: @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> FileInput[str]: ... # Because mode isn't keyword-only here yet, we need two overloads each for # the bytes case and the fallback case. @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> FileInput[bytes]: ... @overload def input( @@ -127,17 +127,17 @@ else: backup: str, bufsize: int, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> FileInput[bytes]: ... @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... @overload def input( @@ -146,7 +146,7 @@ else: backup: str, bufsize: int, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... def close() -> None: ... 
@@ -164,38 +164,38 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): @overload def __init__( self: FileInput[str], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... @overload def __init__( self: FileInput[bytes], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., - encoding: None = ..., - errors: None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, ) -> None: ... @overload def __init__( self: FileInput[Any], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... elif sys.version_info >= (3, 8): @@ -203,57 +203,57 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): @overload def __init__( self: FileInput[str], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> None: ... @overload def __init__( self: FileInput[bytes], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> None: ... @overload def __init__( self: FileInput[Any], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> None: ... 
else: @overload def __init__( self: FileInput[str], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> None: ... # Because mode isn't keyword-only here yet, we need two overloads each for # the bytes case and the fallback case. @overload def __init__( self: FileInput[bytes], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> None: ... @overload def __init__( @@ -263,18 +263,18 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): backup: str, bufsize: int, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> None: ... @overload def __init__( self: FileInput[Any], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> None: ... @overload def __init__( @@ -284,7 +284,7 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): backup: str, bufsize: int, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> None: ... def __del__(self) -> None: ... @@ -311,10 +311,10 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): if sys.version_info >= (3, 10): def hook_compressed( - filename: StrOrBytesPath, mode: str, *, encoding: str | None = ..., errors: str | None = ... + filename: StrOrBytesPath, mode: str, *, encoding: str | None = None, errors: str | None = None ) -> IO[Any]: ... else: def hook_compressed(filename: StrOrBytesPath, mode: str) -> IO[Any]: ... -def hook_encoded(encoding: str, errors: str | None = ...) -> Callable[[StrOrBytesPath, str], IO[Any]]: ... +def hook_encoded(encoding: str, errors: str | None = None) -> Callable[[StrOrBytesPath, str], IO[Any]]: ... diff --git a/mypy/typeshed/stdlib/formatter.pyi b/mypy/typeshed/stdlib/formatter.pyi index 388dbd6071ac..05c3c8b3dd41 100644 --- a/mypy/typeshed/stdlib/formatter.pyi +++ b/mypy/typeshed/stdlib/formatter.pyi @@ -8,11 +8,11 @@ _StylesType: TypeAlias = tuple[Any, ...] class NullFormatter: writer: NullWriter | None - def __init__(self, writer: NullWriter | None = ...) -> None: ... + def __init__(self, writer: NullWriter | None = None) -> None: ... def end_paragraph(self, blankline: int) -> None: ... 
def add_line_break(self) -> None: ... def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... - def add_label_data(self, format: str, counter: int, blankline: int | None = ...) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... def add_flowing_data(self, data: str) -> None: ... def add_literal_data(self, data: str) -> None: ... def flush_softspace(self) -> None: ... @@ -24,8 +24,8 @@ class NullFormatter: def pop_margin(self) -> None: ... def set_spacing(self, spacing: str | None) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... - def pop_style(self, n: int = ...) -> None: ... - def assert_line_data(self, flag: int = ...) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... class AbstractFormatter: writer: NullWriter @@ -45,7 +45,7 @@ class AbstractFormatter: def end_paragraph(self, blankline: int) -> None: ... def add_line_break(self) -> None: ... def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... - def add_label_data(self, format: str, counter: int, blankline: int | None = ...) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... def format_counter(self, format: Iterable[str], counter: int) -> str: ... def format_letter(self, case: str, counter: int) -> str: ... def format_roman(self, case: str, counter: int) -> str: ... @@ -60,8 +60,8 @@ class AbstractFormatter: def pop_margin(self) -> None: ... def set_spacing(self, spacing: str | None) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... - def pop_style(self, n: int = ...) -> None: ... - def assert_line_data(self, flag: int = ...) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... class NullWriter: def flush(self) -> None: ... @@ -82,7 +82,7 @@ class AbstractWriter(NullWriter): ... class DumbWriter(NullWriter): file: IO[str] maxcol: int - def __init__(self, file: IO[str] | None = ..., maxcol: int = ...) -> None: ... + def __init__(self, file: IO[str] | None = None, maxcol: int = 72) -> None: ... def reset(self) -> None: ... -def test(file: str | None = ...) -> None: ... +def test(file: str | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/fractions.pyi b/mypy/typeshed/stdlib/fractions.pyi index e05f59e3d191..95e4aad0f9ca 100644 --- a/mypy/typeshed/stdlib/fractions.pyi +++ b/mypy/typeshed/stdlib/fractions.pyi @@ -24,15 +24,15 @@ else: class Fraction(Rational): @overload def __new__( - cls: type[Self], numerator: int | Rational = ..., denominator: int | Rational | None = ..., *, _normalize: bool = ... + cls: type[Self], numerator: int | Rational = 0, denominator: int | Rational | None = None, *, _normalize: bool = True ) -> Self: ... @overload - def __new__(cls: type[Self], __value: float | Decimal | str, *, _normalize: bool = ...) -> Self: ... + def __new__(cls: type[Self], __value: float | Decimal | str, *, _normalize: bool = True) -> Self: ... @classmethod def from_float(cls: type[Self], f: float) -> Self: ... @classmethod def from_decimal(cls: type[Self], dec: Decimal) -> Self: ... - def limit_denominator(self, max_denominator: int = ...) -> Fraction: ... + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> tuple[int, int]: ... @@ -129,7 +129,7 @@ class Fraction(Rational): def __floor__(a) -> int: ... def __ceil__(a) -> int: ... 
@overload - def __round__(self, ndigits: None = ...) -> int: ... + def __round__(self, ndigits: None = None) -> int: ... @overload def __round__(self, ndigits: int) -> Fraction: ... def __hash__(self) -> int: ... diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi index 3d284c597019..6c33f1409822 100644 --- a/mypy/typeshed/stdlib/ftplib.pyi +++ b/mypy/typeshed/stdlib/ftplib.pyi @@ -44,28 +44,28 @@ class FTP: if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, *, - encoding: str = ..., + encoding: str = "utf-8", ) -> None: ... else: def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, ) -> None: ... def connect( - self, host: str = ..., port: int = ..., timeout: float = ..., source_address: tuple[str, int] | None = ... + self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None ) -> str: ... def getwelcome(self) -> str: ... def set_debuglevel(self, level: int) -> None: ... @@ -85,28 +85,28 @@ class FTP: def sendeprt(self, host: str, port: int) -> str: ... def makeport(self) -> socket: ... def makepasv(self) -> tuple[str, int]: ... - def login(self, user: str = ..., passwd: str = ..., acct: str = ...) -> str: ... + def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ... # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. - def ntransfercmd(self, cmd: str, rest: int | str | None = ...) -> tuple[socket, int]: ... - def transfercmd(self, cmd: str, rest: int | str | None = ...) -> socket: ... + def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int]: ... + def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ... def retrbinary( - self, cmd: str, callback: Callable[[bytes], object], blocksize: int = ..., rest: int | str | None = ... + self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None ) -> str: ... def storbinary( self, cmd: str, fp: SupportsRead[bytes], - blocksize: int = ..., - callback: Callable[[bytes], object] | None = ..., - rest: int | str | None = ..., + blocksize: int = 8192, + callback: Callable[[bytes], object] | None = None, + rest: int | str | None = None, ) -> str: ... - def retrlines(self, cmd: str, callback: Callable[[str], object] | None = ...) -> str: ... - def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = ...) -> str: ... + def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: ... + def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: ... def acct(self, password: str) -> str: ... def nlst(self, *args: str) -> list[str]: ... # Technically only the last arg can be a Callable but ... def dir(self, *args: str | Callable[[str], object]) -> None: ... - def mlsd(self, path: str = ..., facts: Iterable[str] = ...) -> Iterator[tuple[str, dict[str, str]]]: ... 
+ def mlsd(self, path: str = "", facts: Iterable[str] = ...) -> Iterator[tuple[str, dict[str, str]]]: ... def rename(self, fromname: str, toname: str) -> str: ... def delete(self, filename: str) -> str: ... def cwd(self, dirname: str) -> str: ... @@ -121,36 +121,36 @@ class FTP_TLS(FTP): if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - context: SSLContext | None = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + keyfile: str | None = None, + certfile: str | None = None, + context: SSLContext | None = None, timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, *, - encoding: str = ..., + encoding: str = "utf-8", ) -> None: ... else: def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - context: SSLContext | None = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + keyfile: str | None = None, + certfile: str | None = None, + context: SSLContext | None = None, timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, ) -> None: ... ssl_version: int keyfile: str | None certfile: str | None context: SSLContext - def login(self, user: str = ..., passwd: str = ..., acct: str = ..., secure: bool = ...) -> str: ... + def login(self, user: str = "", passwd: str = "", acct: str = "", secure: bool = True) -> str: ... def auth(self) -> str: ... def prot_p(self) -> str: ... def prot_c(self) -> str: ... @@ -161,5 +161,5 @@ def parse227(resp: str) -> tuple[str, int]: ... # undocumented def parse229(resp: str, peer: Any) -> tuple[str, int]: ... # undocumented def parse257(resp: str) -> str: ... # undocumented def ftpcp( - source: FTP, sourcename: str, target: FTP, targetname: str = ..., type: Literal["A", "I"] = ... + source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I" ) -> None: ... # undocumented diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 5c3f662c3dd5..8778798144de 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -55,15 +55,15 @@ class _lru_cache_wrapper(Generic[_T]): if sys.version_info >= (3, 8): @overload - def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... @overload - def lru_cache(maxsize: Callable[..., _T], typed: bool = ...) -> _lru_cache_wrapper[_T]: ... + def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... else: - def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... 
WRAPPER_ASSIGNMENTS: tuple[ - Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"], + Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"] ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] @@ -96,7 +96,7 @@ class partialmethod(Generic[_T]): @overload def __init__(self, __func: _Descriptor, *args: Any, **keywords: Any) -> None: ... if sys.version_info >= (3, 8): - def __get__(self, obj: Any, cls: type[Any] | None = ...) -> Callable[..., _T]: ... + def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... else: def __get__(self, obj: Any, cls: type[Any] | None) -> Callable[..., _T]: ... @@ -132,21 +132,21 @@ if sys.version_info >= (3, 8): @property def __isabstractmethod__(self) -> bool: ... @overload - def register(self, cls: type[Any], method: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: type[Any], method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... @overload - def register(self, cls: Callable[..., _T], method: None = ...) -> Callable[..., _T]: ... + def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... @overload def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... - def __get__(self, obj: _S, cls: type[_S] | None = ...) -> Callable[..., _T]: ... + def __get__(self, obj: _S, cls: type[_S] | None = None) -> Callable[..., _T]: ... class cached_property(Generic[_T]): func: Callable[[Any], _T] attrname: str | None def __init__(self, func: Callable[[Any], _T]) -> None: ... @overload - def __get__(self, instance: None, owner: type[Any] | None = ...) -> cached_property[_T]: ... + def __get__(self, instance: None, owner: type[Any] | None = None) -> cached_property[_T]: ... @overload - def __get__(self, instance: object, owner: type[Any] | None = ...) -> _T: ... + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... def __set_name__(self, owner: type[Any], name: str) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/mypy/typeshed/stdlib/gc.pyi b/mypy/typeshed/stdlib/gc.pyi index d24b7c1f4c7c..27cee726ba09 100644 --- a/mypy/typeshed/stdlib/gc.pyi +++ b/mypy/typeshed/stdlib/gc.pyi @@ -14,14 +14,14 @@ _CallbackType: TypeAlias = Callable[[Literal["start", "stop"], dict[str, int]], callbacks: list[_CallbackType] garbage: list[Any] -def collect(generation: int = ...) -> int: ... +def collect(generation: int = 2) -> int: ... def disable() -> None: ... def enable() -> None: ... def get_count() -> tuple[int, int, int]: ... def get_debug() -> int: ... if sys.version_info >= (3, 8): - def get_objects(generation: int | None = ...) -> list[Any]: ... + def get_objects(generation: int | None = None) -> list[Any]: ... else: def get_objects() -> list[Any]: ... diff --git a/mypy/typeshed/stdlib/getopt.pyi b/mypy/typeshed/stdlib/getopt.pyi index 42ddb1cb7020..14d63dbd6f99 100644 --- a/mypy/typeshed/stdlib/getopt.pyi +++ b/mypy/typeshed/stdlib/getopt.pyi @@ -6,6 +6,6 @@ def gnu_getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tu class GetoptError(Exception): msg: str opt: str - def __init__(self, msg: str, opt: str = ...) -> None: ... + def __init__(self, msg: str, opt: str = "") -> None: ... 
error = GetoptError diff --git a/mypy/typeshed/stdlib/getpass.pyi b/mypy/typeshed/stdlib/getpass.pyi index 153db2f4cb9e..6104e0dedfee 100644 --- a/mypy/typeshed/stdlib/getpass.pyi +++ b/mypy/typeshed/stdlib/getpass.pyi @@ -2,7 +2,7 @@ from typing import TextIO __all__ = ["getpass", "getuser", "GetPassWarning"] -def getpass(prompt: str = ..., stream: TextIO | None = ...) -> str: ... +def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ... def getuser() -> str: ... class GetPassWarning(UserWarning): ... diff --git a/mypy/typeshed/stdlib/gettext.pyi b/mypy/typeshed/stdlib/gettext.pyi index 3c07abeb2d8a..5d98227ec1f4 100644 --- a/mypy/typeshed/stdlib/gettext.pyi +++ b/mypy/typeshed/stdlib/gettext.pyi @@ -32,7 +32,7 @@ class _TranslationsReader(Protocol): # name: str class NullTranslations: - def __init__(self, fp: _TranslationsReader | None = ...) -> None: ... + def __init__(self, fp: _TranslationsReader | None = None) -> None: ... def _parse(self, fp: _TranslationsReader) -> None: ... def add_fallback(self, fallback: NullTranslations) -> None: ... def gettext(self, message: str) -> str: ... @@ -49,7 +49,7 @@ class NullTranslations: def lgettext(self, message: str) -> str: ... def lngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... - def install(self, names: Container[str] | None = ...) -> None: ... + def install(self, names: Container[str] | None = None) -> None: ... class GNUTranslations(NullTranslations): LE_MAGIC: Final[int] @@ -59,14 +59,16 @@ class GNUTranslations(NullTranslations): @overload # ignores incompatible overloads def find( # type: ignore[misc] - domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: Literal[False] = ... + domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: Literal[False] = False ) -> str | None: ... @overload def find( - domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: Literal[True] = ... + domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, *, all: Literal[True] ) -> list[str]: ... @overload -def find(domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: bool = ...) -> Any: ... +def find(domain: str, localedir: StrPath | None, languages: Iterable[str] | None, all: Literal[True]) -> list[str]: ... +@overload +def find(domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: bool = False) -> Any: ... _NullTranslationsT = TypeVar("_NullTranslationsT", bound=NullTranslations) @@ -74,19 +76,19 @@ if sys.version_info >= (3, 11): @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: None = ..., - fallback: Literal[False] = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, ) -> GNUTranslations: ... @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, *, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., + fallback: Literal[False] = False, ) -> _NullTranslationsT: ... 
@overload def translation( @@ -94,37 +96,37 @@ if sys.version_info >= (3, 11): localedir: StrPath | None, languages: Iterable[str] | None, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., + fallback: Literal[False] = False, ) -> _NullTranslationsT: ... @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: Callable[[io.BufferedReader], NullTranslations] | None = ..., - fallback: bool = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, ) -> NullTranslations: ... - def install(domain: str, localedir: StrPath | None = ..., *, names: Container[str] | None = ...) -> None: ... + def install(domain: str, localedir: StrPath | None = None, *, names: Container[str] | None = None) -> None: ... else: @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: None = ..., - fallback: Literal[False] = ..., - codeset: str | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, + codeset: str | None = None, ) -> GNUTranslations: ... @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, *, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., - codeset: str | None = ..., + fallback: Literal[False] = False, + codeset: str | None = None, ) -> _NullTranslationsT: ... @overload def translation( @@ -132,24 +134,24 @@ else: localedir: StrPath | None, languages: Iterable[str] | None, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., - codeset: str | None = ..., + fallback: Literal[False] = False, + codeset: str | None = None, ) -> _NullTranslationsT: ... @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: Callable[[io.BufferedReader], NullTranslations] | None = ..., - fallback: bool = ..., - codeset: str | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, + codeset: str | None = None, ) -> NullTranslations: ... def install( - domain: str, localedir: StrPath | None = ..., codeset: str | None = ..., names: Container[str] | None = ... + domain: str, localedir: StrPath | None = None, codeset: str | None = None, names: Container[str] | None = None ) -> None: ... -def textdomain(domain: str | None = ...) -> str: ... -def bindtextdomain(domain: str, localedir: StrPath | None = ...) -> str: ... +def textdomain(domain: str | None = None) -> str: ... +def bindtextdomain(domain: str, localedir: StrPath | None = None) -> str: ... def dgettext(domain: str, message: str) -> str: ... def dngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... def gettext(message: str) -> str: ... @@ -166,6 +168,6 @@ if sys.version_info < (3, 11): def ldgettext(domain: str, message: str) -> str: ... def lngettext(msgid1: str, msgid2: str, n: int) -> str: ... def ldngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... - def bind_textdomain_codeset(domain: str, codeset: str | None = ...) 
-> str: ... + def bind_textdomain_codeset(domain: str, codeset: str | None = None) -> str: ... Catalog = translation diff --git a/mypy/typeshed/stdlib/glob.pyi b/mypy/typeshed/stdlib/glob.pyi index c63563d19f58..914ccc12ef1e 100644 --- a/mypy/typeshed/stdlib/glob.pyi +++ b/mypy/typeshed/stdlib/glob.pyi @@ -12,31 +12,31 @@ if sys.version_info >= (3, 11): def glob( pathname: AnyStr, *, - root_dir: StrOrBytesPath | None = ..., - dir_fd: int | None = ..., - recursive: bool = ..., - include_hidden: bool = ..., + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, ) -> list[AnyStr]: ... def iglob( pathname: AnyStr, *, - root_dir: StrOrBytesPath | None = ..., - dir_fd: int | None = ..., - recursive: bool = ..., - include_hidden: bool = ..., + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, ) -> Iterator[AnyStr]: ... elif sys.version_info >= (3, 10): def glob( - pathname: AnyStr, *, root_dir: StrOrBytesPath | None = ..., dir_fd: int | None = ..., recursive: bool = ... + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False ) -> list[AnyStr]: ... def iglob( - pathname: AnyStr, *, root_dir: StrOrBytesPath | None = ..., dir_fd: int | None = ..., recursive: bool = ... + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False ) -> Iterator[AnyStr]: ... else: - def glob(pathname: AnyStr, *, recursive: bool = ...) -> list[AnyStr]: ... - def iglob(pathname: AnyStr, *, recursive: bool = ...) -> Iterator[AnyStr]: ... + def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: ... + def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: ... def escape(pathname: AnyStr) -> AnyStr: ... def has_magic(s: str | bytes) -> bool: ... # undocumented diff --git a/mypy/typeshed/stdlib/graphlib.pyi b/mypy/typeshed/stdlib/graphlib.pyi index 4c6959decc4b..c02d447ad501 100644 --- a/mypy/typeshed/stdlib/graphlib.pyi +++ b/mypy/typeshed/stdlib/graphlib.pyi @@ -12,7 +12,7 @@ if sys.version_info >= (3, 11): class TopologicalSorter(Generic[_T]): @overload - def __init__(self, graph: None = ...) -> None: ... + def __init__(self, graph: None = None) -> None: ... @overload def __init__(self, graph: SupportsItems[_T, Iterable[_T]]) -> None: ... def add(self, node: _T, *predecessors: _T) -> None: ... diff --git a/mypy/typeshed/stdlib/gzip.pyi b/mypy/typeshed/stdlib/gzip.pyi index 580e605b6b38..6a794f381ad6 100644 --- a/mypy/typeshed/stdlib/gzip.pyi +++ b/mypy/typeshed/stdlib/gzip.pyi @@ -43,45 +43,45 @@ class _WritableFileobj(Protocol): @overload def open( filename: StrOrBytesPath | _ReadableFileobj, - mode: _ReadBinaryMode = ..., - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + mode: _ReadBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> GzipFile: ... @overload def open( filename: StrOrBytesPath | _WritableFileobj, mode: _WriteBinaryMode, - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> GzipFile: ... 
@overload def open( filename: StrOrBytesPath, mode: _OpenTextMode, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, mode: str, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> GzipFile | TextIO: ... class _PaddedFile: file: _ReadableFileobj - def __init__(self, f: _ReadableFileobj, prepend: bytes = ...) -> None: ... + def __init__(self, f: _ReadableFileobj, prepend: bytes = b"") -> None: ... def read(self, size: int) -> bytes: ... - def prepend(self, prepend: bytes = ...) -> None: ... + def prepend(self, prepend: bytes = b"") -> None: ... def seek(self, off: int) -> int: ... def seekable(self) -> bool: ... @@ -99,45 +99,45 @@ class GzipFile(_compression.BaseStream): self, filename: StrOrBytesPath | None, mode: _ReadBinaryMode, - compresslevel: int = ..., - fileobj: _ReadableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, *, mode: _ReadBinaryMode, - compresslevel: int = ..., - fileobj: _ReadableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, filename: StrOrBytesPath | None, mode: _WriteBinaryMode, - compresslevel: int = ..., - fileobj: _WritableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, *, mode: _WriteBinaryMode, - compresslevel: int = ..., - fileobj: _WritableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, - filename: StrOrBytesPath | None = ..., - mode: str | None = ..., - compresslevel: int = ..., - fileobj: _ReadableFileobj | _WritableFileobj | None = ..., - mtime: float | None = ..., + filename: StrOrBytesPath | None = None, + mode: str | None = None, + compresslevel: int = 9, + fileobj: _ReadableFileobj | _WritableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @property def filename(self) -> str: ... @@ -145,23 +145,23 @@ class GzipFile(_compression.BaseStream): def mtime(self) -> int | None: ... crc: int def write(self, data: ReadableBuffer) -> int: ... - def read(self, size: int | None = ...) -> bytes: ... - def read1(self, size: int = ...) -> bytes: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... def peek(self, n: int) -> bytes: ... def close(self) -> None: ... - def flush(self, zlib_mode: int = ...) -> None: ... + def flush(self, zlib_mode: int = 2) -> None: ... def fileno(self) -> int: ... def rewind(self) -> None: ... - def seek(self, offset: int, whence: int = ...) -> int: ... - def readline(self, size: int | None = ...) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def readline(self, size: int | None = -1) -> bytes: ... 
class _GzipReader(_compression.DecompressReader): def __init__(self, fp: _ReadableFileobj) -> None: ... if sys.version_info >= (3, 8): - def compress(data: _BufferWithLen, compresslevel: int = ..., *, mtime: float | None = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: ... else: - def compress(data: _BufferWithLen, compresslevel: int = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = 9) -> bytes: ... def decompress(data: ReadableBuffer) -> bytes: ... diff --git a/mypy/typeshed/stdlib/hashlib.pyi b/mypy/typeshed/stdlib/hashlib.pyi index 2a417364b171..8292e319330a 100644 --- a/mypy/typeshed/stdlib/hashlib.pyi +++ b/mypy/typeshed/stdlib/hashlib.pyi @@ -62,25 +62,25 @@ class _Hash: def update(self, __data: ReadableBuffer) -> None: ... if sys.version_info >= (3, 9): - def new(name: str, data: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def md5(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha1(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha224(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha256(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha384(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha512(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... + def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> _Hash: ... + def md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... elif sys.version_info >= (3, 8): - def new(name: str, data: ReadableBuffer = ...) -> _Hash: ... - def md5(string: ReadableBuffer = ...) -> _Hash: ... - def sha1(string: ReadableBuffer = ...) -> _Hash: ... - def sha224(string: ReadableBuffer = ...) -> _Hash: ... - def sha256(string: ReadableBuffer = ...) -> _Hash: ... - def sha384(string: ReadableBuffer = ...) -> _Hash: ... - def sha512(string: ReadableBuffer = ...) -> _Hash: ... + def new(name: str, data: ReadableBuffer = b"") -> _Hash: ... + def md5(string: ReadableBuffer = b"") -> _Hash: ... + def sha1(string: ReadableBuffer = b"") -> _Hash: ... + def sha224(string: ReadableBuffer = b"") -> _Hash: ... + def sha256(string: ReadableBuffer = b"") -> _Hash: ... + def sha384(string: ReadableBuffer = b"") -> _Hash: ... + def sha512(string: ReadableBuffer = b"") -> _Hash: ... else: - def new(name: str, data: ReadableBuffer = ...) -> _Hash: ... + def new(name: str, data: ReadableBuffer = b"") -> _Hash: ... def md5(__string: ReadableBuffer = ...) -> _Hash: ... def sha1(__string: ReadableBuffer = ...) -> _Hash: ... def sha224(__string: ReadableBuffer = ...) -> _Hash: ... @@ -92,7 +92,7 @@ algorithms_guaranteed: AbstractSet[str] algorithms_available: AbstractSet[str] def pbkdf2_hmac( - hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = ... 
+ hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None ) -> bytes: ... class _VarLenHash: @@ -115,12 +115,12 @@ shake_256 = _VarLenHash def scrypt( password: ReadableBuffer, *, - salt: ReadableBuffer | None = ..., - n: int | None = ..., - r: int | None = ..., - p: int | None = ..., - maxmem: int = ..., - dklen: int = ..., + salt: ReadableBuffer | None = None, + n: int | None = None, + r: int | None = None, + p: int | None = None, + maxmem: int = 0, + dklen: int = 64, ) -> bytes: ... @final class _BlakeHash(_Hash): @@ -177,5 +177,5 @@ if sys.version_info >= (3, 11): def readable(self) -> bool: ... def file_digest( - __fileobj: _BytesIOLike | _FileDigestFileObj, __digest: str | Callable[[], _Hash], *, _bufsize: int = ... + __fileobj: _BytesIOLike | _FileDigestFileObj, __digest: str | Callable[[], _Hash], *, _bufsize: int = 262144 ) -> _Hash: ... diff --git a/mypy/typeshed/stdlib/heapq.pyi b/mypy/typeshed/stdlib/heapq.pyi index b280322685db..9d7815507ea5 100644 --- a/mypy/typeshed/stdlib/heapq.pyi +++ b/mypy/typeshed/stdlib/heapq.pyi @@ -10,8 +10,8 @@ _S = TypeVar("_S") __about__: str def merge( - *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ..., reverse: bool = ... + *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False ) -> Iterable[_S]: ... -def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ...) -> list[_S]: ... -def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ...) -> list[_S]: ... +def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... def _heapify_max(__x: list[Any]) -> None: ... # undocumented diff --git a/mypy/typeshed/stdlib/hmac.pyi b/mypy/typeshed/stdlib/hmac.pyi index dc29836b6b87..b9a867f7bd61 100644 --- a/mypy/typeshed/stdlib/hmac.pyi +++ b/mypy/typeshed/stdlib/hmac.pyi @@ -23,14 +23,14 @@ if sys.version_info >= (3, 8): def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... else: - def new(key: bytes | bytearray, msg: ReadableBuffer | None = ..., digestmod: _DigestMod | None = ...) -> HMAC: ... + def new(key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod | None = None) -> HMAC: ... class HMAC: digest_size: int block_size: int @property def name(self) -> str: ... - def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = ..., digestmod: _DigestMod = ...) -> None: ... + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: ... def update(self, msg: ReadableBuffer) -> None: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... diff --git a/mypy/typeshed/stdlib/html/__init__.pyi b/mypy/typeshed/stdlib/html/__init__.pyi index 109c5f4b50fb..afba90832535 100644 --- a/mypy/typeshed/stdlib/html/__init__.pyi +++ b/mypy/typeshed/stdlib/html/__init__.pyi @@ -2,5 +2,5 @@ from typing import AnyStr __all__ = ["escape", "unescape"] -def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ... +def escape(s: AnyStr, quote: bool = True) -> AnyStr: ... def unescape(s: AnyStr) -> AnyStr: ... 
diff --git a/mypy/typeshed/stdlib/html/parser.pyi b/mypy/typeshed/stdlib/html/parser.pyi index 6dde9f705978..d322ade965d9 100644 --- a/mypy/typeshed/stdlib/html/parser.pyi +++ b/mypy/typeshed/stdlib/html/parser.pyi @@ -4,7 +4,7 @@ from re import Pattern __all__ = ["HTMLParser"] class HTMLParser(ParserBase): - def __init__(self, *, convert_charrefs: bool = ...) -> None: ... + def __init__(self, *, convert_charrefs: bool = True) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> None: ... def get_starttag_text(self) -> str | None: ... diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index 53cefc0a33d1..bb641875e55b 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -114,12 +114,12 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): chunk_left: int | None length: int | None will_close: bool - def __init__(self, sock: socket, debuglevel: int = ..., method: str | None = ..., url: str | None = ...) -> None: ... - def peek(self, n: int = ...) -> bytes: ... - def read(self, amt: int | None = ...) -> bytes: ... - def read1(self, n: int = ...) -> bytes: ... + def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... + def peek(self, n: int = -1) -> bytes: ... + def read(self, amt: int | None = None) -> bytes: ... + def read1(self, n: int = -1) -> bytes: ... def readinto(self, b: WriteableBuffer) -> int: ... - def readline(self, limit: int = ...) -> bytes: ... # type: ignore[override] + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] @overload def getheader(self, name: str) -> str | None: ... @overload @@ -148,28 +148,28 @@ class HTTPConnection: def __init__( self, host: str, - port: int | None = ..., + port: int | None = None, timeout: float | None = ..., - source_address: tuple[str, int] | None = ..., - blocksize: int = ..., + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, ) -> None: ... def request( self, method: str, url: str, - body: _DataType | str | None = ..., + body: _DataType | str | None = None, headers: Mapping[str, str] = ..., *, - encode_chunked: bool = ..., + encode_chunked: bool = False, ) -> None: ... def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... - def set_tunnel(self, host: str, port: int | None = ..., headers: Mapping[str, str] | None = ...) -> None: ... + def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ... def connect(self) -> None: ... def close(self) -> None: ... - def putrequest(self, method: str, url: str, skip_host: bool = ..., skip_accept_encoding: bool = ...) -> None: ... + def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: ... def putheader(self, header: str, *argument: str) -> None: ... - def endheaders(self, message_body: _DataType | None = ..., *, encode_chunked: bool = ...) -> None: ... + def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ... def send(self, data: _DataType | str) -> None: ... 
class HTTPSConnection(HTTPConnection): @@ -178,15 +178,15 @@ class HTTPSConnection(HTTPConnection): def __init__( self, host: str, - port: int | None = ..., - key_file: str | None = ..., - cert_file: str | None = ..., + port: int | None = None, + key_file: str | None = None, + cert_file: str | None = None, timeout: float | None = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, *, - context: ssl.SSLContext | None = ..., - check_hostname: bool | None = ..., - blocksize: int = ..., + context: ssl.SSLContext | None = None, + check_hostname: bool | None = None, + blocksize: int = 8192, ) -> None: ... class HTTPException(Exception): ... @@ -203,7 +203,7 @@ class UnknownTransferEncoding(HTTPException): ... class UnimplementedFileMode(HTTPException): ... class IncompleteRead(HTTPException): - def __init__(self, partial: bytes, expected: int | None = ...) -> None: ... + def __init__(self, partial: bytes, expected: int | None = None) -> None: ... partial: bytes expected: int | None diff --git a/mypy/typeshed/stdlib/http/cookiejar.pyi b/mypy/typeshed/stdlib/http/cookiejar.pyi index dc3c0e17d336..7f2c9c6cc8f4 100644 --- a/mypy/typeshed/stdlib/http/cookiejar.pyi +++ b/mypy/typeshed/stdlib/http/cookiejar.pyi @@ -28,14 +28,14 @@ class CookieJar(Iterable[Cookie]): domain_re: ClassVar[Pattern[str]] # undocumented dots_re: ClassVar[Pattern[str]] # undocumented magic_re: ClassVar[Pattern[str]] # undocumented - def __init__(self, policy: CookiePolicy | None = ...) -> None: ... + def __init__(self, policy: CookiePolicy | None = None) -> None: ... def add_cookie_header(self, request: Request) -> None: ... def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... def set_policy(self, policy: CookiePolicy) -> None: ... def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ... def set_cookie(self, cookie: Cookie) -> None: ... def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ... - def clear(self, domain: str | None = ..., path: str | None = ..., name: str | None = ...) -> None: ... + def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ... def clear_session_cookies(self) -> None: ... def clear_expired_cookies(self) -> None: ... # undocumented def __iter__(self) -> Iterator[Cookie]: ... @@ -45,20 +45,22 @@ class FileCookieJar(CookieJar): filename: str delayload: bool if sys.version_info >= (3, 8): - def __init__(self, filename: StrPath | None = ..., delayload: bool = ..., policy: CookiePolicy | None = ...) -> None: ... + def __init__( + self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None + ) -> None: ... else: - def __init__(self, filename: str | None = ..., delayload: bool = ..., policy: CookiePolicy | None = ...) -> None: ... + def __init__(self, filename: str | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: ... - def save(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... - def load(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... - def revert(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... + def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... 
+ def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... class MozillaCookieJar(FileCookieJar): if sys.version_info < (3, 10): header: ClassVar[str] # undocumented class LWPCookieJar(FileCookieJar): - def as_lwp_str(self, ignore_discard: bool = ..., ignore_expires: bool = ...) -> str: ... # undocumented + def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: ... # undocumented class CookiePolicy: netscape: bool @@ -85,35 +87,35 @@ class DefaultCookiePolicy(CookiePolicy): if sys.version_info >= (3, 8): def __init__( self, - blocked_domains: Sequence[str] | None = ..., - allowed_domains: Sequence[str] | None = ..., - netscape: bool = ..., - rfc2965: bool = ..., - rfc2109_as_netscape: bool | None = ..., - hide_cookie2: bool = ..., - strict_domain: bool = ..., - strict_rfc2965_unverifiable: bool = ..., - strict_ns_unverifiable: bool = ..., - strict_ns_domain: int = ..., - strict_ns_set_initial_dollar: bool = ..., - strict_ns_set_path: bool = ..., + blocked_domains: Sequence[str] | None = None, + allowed_domains: Sequence[str] | None = None, + netscape: bool = True, + rfc2965: bool = False, + rfc2109_as_netscape: bool | None = None, + hide_cookie2: bool = False, + strict_domain: bool = False, + strict_rfc2965_unverifiable: bool = True, + strict_ns_unverifiable: bool = False, + strict_ns_domain: int = 0, + strict_ns_set_initial_dollar: bool = False, + strict_ns_set_path: bool = False, secure_protocols: Sequence[str] = ..., ) -> None: ... else: def __init__( self, - blocked_domains: Sequence[str] | None = ..., - allowed_domains: Sequence[str] | None = ..., - netscape: bool = ..., - rfc2965: bool = ..., - rfc2109_as_netscape: bool | None = ..., - hide_cookie2: bool = ..., - strict_domain: bool = ..., - strict_rfc2965_unverifiable: bool = ..., - strict_ns_unverifiable: bool = ..., - strict_ns_domain: int = ..., - strict_ns_set_initial_dollar: bool = ..., - strict_ns_set_path: bool = ..., + blocked_domains: Sequence[str] | None = None, + allowed_domains: Sequence[str] | None = None, + netscape: bool = True, + rfc2965: bool = False, + rfc2109_as_netscape: bool | None = None, + hide_cookie2: bool = False, + strict_domain: bool = False, + strict_rfc2965_unverifiable: bool = True, + strict_ns_unverifiable: bool = False, + strict_ns_domain: int = 0, + strict_ns_set_initial_dollar: bool = False, + strict_ns_set_path: bool = False, ) -> None: ... def blocked_domains(self) -> tuple[str, ...]: ... @@ -170,7 +172,7 @@ class Cookie: comment: str | None, comment_url: str | None, rest: dict[str, str], - rfc2109: bool = ..., + rfc2109: bool = False, ) -> None: ... def has_nonstandard_attr(self, name: str) -> bool: ... @overload @@ -178,4 +180,4 @@ class Cookie: @overload def get_nonstandard_attr(self, name: str, default: _T) -> str | _T: ... def set_nonstandard_attr(self, name: str, value: str) -> None: ... - def is_expired(self, now: int | None = ...) -> bool: ... + def is_expired(self, now: int | None = None) -> bool: ... diff --git a/mypy/typeshed/stdlib/http/cookies.pyi b/mypy/typeshed/stdlib/http/cookies.pyi index e2fe44d305ef..e24ef9cbdd2e 100644 --- a/mypy/typeshed/stdlib/http/cookies.pyi +++ b/mypy/typeshed/stdlib/http/cookies.pyi @@ -31,29 +31,29 @@ class Morsel(dict[str, Any], Generic[_T]): def key(self) -> str: ... def __init__(self) -> None: ... 
def set(self, key: str, val: str, coded_val: _T) -> None: ... - def setdefault(self, key: str, val: str | None = ...) -> str: ... + def setdefault(self, key: str, val: str | None = None) -> str: ... # The dict update can also get a keywords argument so this is incompatible @overload # type: ignore[override] def update(self, values: Mapping[str, str]) -> None: ... @overload def update(self, values: Iterable[tuple[str, str]]) -> None: ... def isReservedKey(self, K: str) -> bool: ... - def output(self, attrs: list[str] | None = ..., header: str = ...) -> str: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:") -> str: ... __str__ = output - def js_output(self, attrs: list[str] | None = ...) -> str: ... - def OutputString(self, attrs: list[str] | None = ...) -> str: ... + def js_output(self, attrs: list[str] | None = None) -> str: ... + def OutputString(self, attrs: list[str] | None = None) -> str: ... def __eq__(self, morsel: object) -> bool: ... def __setitem__(self, K: str, V: Any) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): - def __init__(self, input: _DataType | None = ...) -> None: ... + def __init__(self, input: _DataType | None = None) -> None: ... def value_decode(self, val: str) -> _T: ... def value_encode(self, val: _T) -> str: ... - def output(self, attrs: list[str] | None = ..., header: str = ..., sep: str = ...) -> str: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ... __str__ = output - def js_output(self, attrs: list[str] | None = ...) -> str: ... + def js_output(self, attrs: list[str] | None = None) -> str: ... def load(self, rawdata: _DataType) -> None: ... def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... diff --git a/mypy/typeshed/stdlib/http/server.pyi b/mypy/typeshed/stdlib/http/server.pyi index 04ac28c3278e..c9700f70e791 100644 --- a/mypy/typeshed/stdlib/http/server.pyi +++ b/mypy/typeshed/stdlib/http/server.pyi @@ -35,17 +35,17 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): monthname: ClassVar[Sequence[str | None]] # undocumented def handle_one_request(self) -> None: ... def handle_expect_100(self) -> bool: ... - def send_error(self, code: int, message: str | None = ..., explain: str | None = ...) -> None: ... - def send_response(self, code: int, message: str | None = ...) -> None: ... + def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: ... + def send_response(self, code: int, message: str | None = None) -> None: ... def send_header(self, keyword: str, value: str) -> None: ... - def send_response_only(self, code: int, message: str | None = ...) -> None: ... + def send_response_only(self, code: int, message: str | None = None) -> None: ... def end_headers(self) -> None: ... def flush_headers(self) -> None: ... - def log_request(self, code: int | str = ..., size: int | str = ...) -> None: ... + def log_request(self, code: int | str = "-", size: int | str = "-") -> None: ... def log_error(self, format: str, *args: Any) -> None: ... def log_message(self, format: str, *args: Any) -> None: ... def version_string(self) -> str: ... - def date_time_string(self, timestamp: int | None = ...) -> str: ... + def date_time_string(self, timestamp: int | None = None) -> str: ... def log_date_time_string(self) -> str: ... def address_string(self) -> str: ... def parse_request(self) -> bool: ... 
# undocumented @@ -60,7 +60,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): client_address: _socket._RetAddress, server: socketserver.BaseServer, *, - directory: str | None = ..., + directory: str | None = None, ) -> None: ... def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi index f13e1c9b656c..8016d8bec5cd 100644 --- a/mypy/typeshed/stdlib/imaplib.pyi +++ b/mypy/typeshed/stdlib/imaplib.pyi @@ -41,11 +41,11 @@ class IMAP4: capabilities: tuple[str, ...] PROTOCOL_VERSION: str if sys.version_info >= (3, 9): - def __init__(self, host: str = ..., port: int = ..., timeout: float | None = ...) -> None: ... - def open(self, host: str = ..., port: int = ..., timeout: float | None = ...) -> None: ... + def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... else: - def __init__(self, host: str = ..., port: int = ...) -> None: ... - def open(self, host: str = ..., port: int = ...) -> None: ... + def __init__(self, host: str = "", port: int = 143) -> None: ... + def open(self, host: str = "", port: int = 143) -> None: ... def __getattr__(self, attr: str) -> Any: ... host: str @@ -77,11 +77,11 @@ class IMAP4: def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... def getquota(self, root: str) -> _CommandResults: ... def getquotaroot(self, mailbox: str) -> _CommandResults: ... - def list(self, directory: str = ..., pattern: str = ...) -> tuple[str, _AnyResponseData]: ... + def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: ... def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... def logout(self) -> tuple[str, _AnyResponseData]: ... - def lsub(self, directory: str = ..., pattern: str = ...) -> _CommandResults: ... + def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: ... def myrights(self, mailbox: str) -> _CommandResults: ... def namespace(self) -> _CommandResults: ... def noop(self) -> tuple[str, _list[bytes]]: ... @@ -89,12 +89,12 @@ class IMAP4: def proxyauth(self, user: str) -> _CommandResults: ... def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: ... def search(self, charset: str | None, *criteria: str) -> _CommandResults: ... - def select(self, mailbox: str = ..., readonly: bool = ...) -> tuple[str, _list[bytes | None]]: ... + def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: ... def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: ... def setannotation(self, *args: str) -> _CommandResults: ... def setquota(self, root: str, limits: str) -> _CommandResults: ... def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: ... - def starttls(self, ssl_context: Any | None = ...) -> tuple[Literal["OK"], _list[None]]: ... + def starttls(self, ssl_context: Any | None = None) -> tuple[Literal["OK"], _list[None]]: ... def status(self, mailbox: str, names: str) -> _CommandResults: ... def store(self, message_set: str, command: str, flags: str) -> _CommandResults: ... def subscribe(self, mailbox: str) -> _CommandResults: ... 
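# [Editor's illustration, not part of the patch] The imaplib hunks above make the
# runtime defaults explicit in the stubs: IMAP4() uses host="" and port=143, and
# select() opens "INBOX" read-write.  A minimal sketch, assuming a reachable server
# named "imap.example.com" and placeholder credentials:
import imaplib

conn = imaplib.IMAP4("imap.example.com")      # port defaults to 143
conn.login("user", "password")                # hypothetical credentials
typ, data = conn.select()                     # mailbox="INBOX", readonly=False
conn.logout()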
@@ -113,28 +113,28 @@ class IMAP4_SSL(IMAP4): if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - port: int = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - ssl_context: SSLContext | None = ..., - timeout: float | None = ..., + host: str = "", + port: int = 993, + keyfile: str | None = None, + certfile: str | None = None, + ssl_context: SSLContext | None = None, + timeout: float | None = None, ) -> None: ... else: def __init__( self, - host: str = ..., - port: int = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - ssl_context: SSLContext | None = ..., + host: str = "", + port: int = 993, + keyfile: str | None = None, + certfile: str | None = None, + ssl_context: SSLContext | None = None, ) -> None: ... sslobj: SSLSocket file: IO[Any] if sys.version_info >= (3, 9): - def open(self, host: str = ..., port: int | None = ..., timeout: float | None = ...) -> None: ... + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... else: - def open(self, host: str = ..., port: int | None = ...) -> None: ... + def open(self, host: str = "", port: int | None = 993) -> None: ... def ssl(self) -> SSLSocket: ... @@ -146,9 +146,9 @@ class IMAP4_stream(IMAP4): writefile: IO[Any] readfile: IO[Any] if sys.version_info >= (3, 9): - def open(self, host: str | None = ..., port: int | None = ..., timeout: float | None = ...) -> None: ... + def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... else: - def open(self, host: str | None = ..., port: int | None = ...) -> None: ... + def open(self, host: str | None = None, port: int | None = None) -> None: ... class _Authenticator: mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] diff --git a/mypy/typeshed/stdlib/imghdr.pyi b/mypy/typeshed/stdlib/imghdr.pyi index 5f439779a69c..ed3647f20fc5 100644 --- a/mypy/typeshed/stdlib/imghdr.pyi +++ b/mypy/typeshed/stdlib/imghdr.pyi @@ -10,7 +10,7 @@ class _ReadableBinary(Protocol): def seek(self, offset: int) -> Any: ... @overload -def what(file: StrPath | _ReadableBinary, h: None = ...) -> str | None: ... +def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ... @overload def what(file: Any, h: bytes) -> str | None: ... diff --git a/mypy/typeshed/stdlib/imp.pyi b/mypy/typeshed/stdlib/imp.pyi index 889f0cac4f9f..3f2920de9c2b 100644 --- a/mypy/typeshed/stdlib/imp.pyi +++ b/mypy/typeshed/stdlib/imp.pyi @@ -29,7 +29,7 @@ IMP_HOOK: int def new_module(name: str) -> types.ModuleType: ... def get_magic() -> bytes: ... def get_tag() -> str: ... -def cache_from_source(path: StrPath, debug_override: bool | None = ...) -> str: ... +def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: ... def source_from_cache(path: StrPath) -> str: ... def get_suffixes() -> list[tuple[str, str, int]]: ... @@ -48,15 +48,15 @@ class _FileLike(Protocol): def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> Any: ... # PathLike doesn't work for the pathname argument here -def load_source(name: str, pathname: str, file: _FileLike | None = ...) -> types.ModuleType: ... -def load_compiled(name: str, pathname: str, file: _FileLike | None = ...) -> types.ModuleType: ... +def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... 
def load_package(name: str, path: StrPath) -> types.ModuleType: ... def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... # IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. def find_module( - name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = ... + name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = None ) -> tuple[IO[Any], str, tuple[str, str, int]]: ... def reload(module: types.ModuleType) -> types.ModuleType: ... def init_builtin(name: str) -> types.ModuleType | None: ... -def load_dynamic(name: str, path: str, file: Any = ...) -> types.ModuleType: ... # file argument is ignored +def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: ... # file argument is ignored diff --git a/mypy/typeshed/stdlib/importlib/__init__.pyi b/mypy/typeshed/stdlib/importlib/__init__.pyi index 42401a00bdeb..1747b274136e 100644 --- a/mypy/typeshed/stdlib/importlib/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/__init__.pyi @@ -7,14 +7,14 @@ __all__ = ["__import__", "import_module", "invalidate_caches", "reload"] # Signature of `builtins.__import__` should be kept identical to `importlib.__import__` def __import__( name: str, - globals: Mapping[str, object] | None = ..., - locals: Mapping[str, object] | None = ..., + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, fromlist: Sequence[str] = ..., - level: int = ..., + level: int = 0, ) -> ModuleType: ... # `importlib.import_module` return type should be kept the same as `builtins.__import__` -def import_module(name: str, package: str | None = ...) -> ModuleType: ... -def find_loader(name: str, path: str | None = ...) -> Loader | None: ... +def import_module(name: str, package: str | None = None) -> ModuleType: ... +def find_loader(name: str, path: str | None = None) -> Loader | None: ... def invalidate_caches() -> None: ... def reload(module: ModuleType) -> ModuleType: ... diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index c961fb2e1f9e..78b79267d06e 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -52,7 +52,7 @@ class InspectLoader(Loader): def get_source(self, fullname: str) -> str | None: ... def exec_module(self, module: types.ModuleType) -> None: ... @staticmethod - def source_to_code(data: ReadableBuffer | str, path: str = ...) -> types.CodeType: ... + def source_to_code(data: ReadableBuffer | str, path: str = "") -> types.CodeType: ... class ExecutionLoader(InspectLoader): @abstractmethod @@ -85,8 +85,8 @@ class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): path: str def __init__(self, fullname: str, path: str) -> None: ... def get_data(self, path: str) -> bytes: ... - def get_filename(self, name: str | None = ...) -> str: ... - def load_module(self, name: str | None = ...) -> types.ModuleType: ... + def get_filename(self, name: str | None = None) -> str: ... + def load_module(self, name: str | None = None) -> types.ModuleType: ... class ResourceReader(metaclass=ABCMeta): @abstractmethod @@ -123,7 +123,7 @@ if sys.version_info >= (3, 9): @abstractmethod def open( self, - mode: OpenTextMode = ..., + mode: OpenTextMode = "r", buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., @@ -186,7 +186,7 @@ if sys.version_info >= (3, 9): @abstractmethod def read_bytes(self) -> bytes: ... 
@abstractmethod - def read_text(self, encoding: str | None = ...) -> str: ... + def read_text(self, encoding: str | None = None) -> str: ... class TraversableResources(ResourceReader): @abstractmethod diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi index 6e253521bc0f..5aaefce87e3a 100644 --- a/mypy/typeshed/stdlib/importlib/machinery.pyi +++ b/mypy/typeshed/stdlib/importlib/machinery.pyi @@ -14,9 +14,9 @@ class ModuleSpec: name: str, loader: importlib.abc.Loader | None, *, - origin: str | None = ..., - loader_state: Any = ..., - is_package: bool | None = ..., + origin: str | None = None, + loader_state: Any = None, + is_package: bool | None = None, ) -> None: ... name: str loader: importlib.abc.Loader | None @@ -32,10 +32,10 @@ class ModuleSpec: class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... # InspectLoader @classmethod @@ -63,10 +63,10 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... # InspectLoader @classmethod @@ -92,10 +92,10 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): class WindowsRegistryFinder(importlib.abc.MetaPathFinder): @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... class PathFinder: @@ -114,10 +114,10 @@ class PathFinder: @classmethod def find_spec( - cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... SOURCE_SUFFIXES: list[str] DEBUG_BYTECODE_SUFFIXES: list[str] @@ -136,13 +136,13 @@ class FileFinder(importlib.abc.PathEntryFinder): ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... 
class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): - def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = ...) -> None: ... + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: ... class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class ExtensionFileLoader(importlib.abc.ExecutionLoader): def __init__(self, name: str, path: str) -> None: ... - def get_filename(self, name: str | None = ...) -> str: ... + def get_filename(self, name: str | None = None) -> str: ... def get_source(self, fullname: str) -> None: ... def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... def exec_module(self, module: types.ModuleType) -> None: ... diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi index 01e35db5815e..cc93aaeca365 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -106,7 +106,7 @@ if sys.version_info >= (3, 10): ) -> EntryPoints: ... class PackagePath(pathlib.PurePosixPath): - def read_text(self, encoding: str = ...) -> str: ... + def read_text(self, encoding: str = "utf-8") -> str: ... def read_binary(self) -> bytes: ... def locate(self) -> PathLike[str]: ... # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: diff --git a/mypy/typeshed/stdlib/importlib/resources.pyi b/mypy/typeshed/stdlib/importlib/resources.pyi index 28ca107f4195..ba3d9b087754 100644 --- a/mypy/typeshed/stdlib/importlib/resources.pyi +++ b/mypy/typeshed/stdlib/importlib/resources.pyi @@ -23,9 +23,9 @@ else: Resource: TypeAlias = str | os.PathLike[Any] def open_binary(package: Package, resource: Resource) -> BinaryIO: ... -def open_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> TextIO: ... +def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... def read_binary(package: Package, resource: Resource) -> bytes: ... -def read_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> str: ... +def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: ... def is_resource(package: Package, name: str) -> bool: ... def contents(package: Package) -> Iterator[str]: ... diff --git a/mypy/typeshed/stdlib/importlib/util.pyi b/mypy/typeshed/stdlib/importlib/util.pyi index e9c08aeccf87..f988eb270a26 100644 --- a/mypy/typeshed/stdlib/importlib/util.pyi +++ b/mypy/typeshed/stdlib/importlib/util.pyi @@ -15,18 +15,18 @@ def resolve_name(name: str, package: str | None) -> str: ... MAGIC_NUMBER: bytes -def cache_from_source(path: str, debug_override: bool | None = ..., *, optimization: Any | None = ...) -> str: ... +def cache_from_source(path: str, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ... def source_from_cache(path: str) -> str: ... def decode_source(source_bytes: ReadableBuffer) -> str: ... -def find_spec(name: str, package: str | None = ...) -> importlib.machinery.ModuleSpec | None: ... +def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: ... def spec_from_loader( - name: str, loader: importlib.abc.Loader | None, *, origin: str | None = ..., is_package: bool | None = ... 
+ name: str, loader: importlib.abc.Loader | None, *, origin: str | None = None, is_package: bool | None = None ) -> importlib.machinery.ModuleSpec | None: ... def spec_from_file_location( name: str, - location: StrOrBytesPath | None = ..., + location: StrOrBytesPath | None = None, *, - loader: importlib.abc.Loader | None = ..., + loader: importlib.abc.Loader | None = None, submodule_search_locations: list[str] | None = ..., ) -> importlib.machinery.ModuleSpec | None: ... def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ... diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi index ad68aa93c894..3b82e0b0af2a 100644 --- a/mypy/typeshed/stdlib/inspect.pyi +++ b/mypy/typeshed/stdlib/inspect.pyi @@ -165,10 +165,10 @@ modulesbyfile: dict[str, Any] _GetMembersPredicate: TypeAlias = Callable[[Any], bool] _GetMembersReturn: TypeAlias = list[tuple[str, Any]] -def getmembers(object: object, predicate: _GetMembersPredicate | None = ...) -> _GetMembersReturn: ... +def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... if sys.version_info >= (3, 11): - def getmembers_static(object: object, predicate: _GetMembersPredicate | None = ...) -> _GetMembersReturn: ... + def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... def getmodulename(path: str) -> str | None: ... def ismodule(object: object) -> TypeGuard[ModuleType]: ... @@ -269,12 +269,12 @@ _SourceObjectType: TypeAlias = Union[ ] def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... -def getabsfile(object: _SourceObjectType, _filename: str | None = ...) -> str: ... +def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ... def getblock(lines: Sequence[str]) -> Sequence[str]: ... def getdoc(object: object) -> str | None: ... def getcomments(object: object) -> str | None: ... def getfile(object: _SourceObjectType) -> str: ... -def getmodule(object: object, _filename: str | None = ...) -> ModuleType | None: ... +def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: ... def getsourcefile(object: _SourceObjectType) -> str | None: ... def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: ... def getsource(object: _SourceObjectType) -> str: ... @@ -290,21 +290,21 @@ if sys.version_info >= (3, 10): def signature( obj: _IntrospectableCallable, *, - follow_wrapped: bool = ..., - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., - eval_str: bool = ..., + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, ) -> Signature: ... else: - def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = ...) -> Signature: ... + def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: ... class _void: ... class _empty: ... class Signature: def __init__( - self, parameters: Sequence[Parameter] | None = ..., *, return_annotation: Any = ..., __validate_parameters__: bool = ... + self, parameters: Sequence[Parameter] | None = None, *, return_annotation: Any = ..., __validate_parameters__: bool = True ) -> None: ... 
empty = _empty @property @@ -322,14 +322,14 @@ class Signature: cls: type[Self], obj: _IntrospectableCallable, *, - follow_wrapped: bool = ..., - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., - eval_str: bool = ..., + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, ) -> Self: ... else: @classmethod - def from_callable(cls: type[Self], obj: _IntrospectableCallable, *, follow_wrapped: bool = ...) -> Self: ... + def from_callable(cls: type[Self], obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... def __eq__(self, other: object) -> bool: ... @@ -337,9 +337,9 @@ if sys.version_info >= (3, 10): def get_annotations( obj: Callable[..., object] | type[Any] | ModuleType, *, - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., - eval_str: bool = ..., + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, ) -> dict[str, Any]: ... # The name is the same as the enum's name in CPython @@ -400,7 +400,7 @@ class BoundArguments: # TODO: The actual return type should be list[_ClassTreeItem] but mypy doesn't # seem to be supporting this at the moment: # _ClassTreeItem = list[_ClassTreeItem] | Tuple[type, Tuple[type, ...]] -def getclasstree(classes: list[type], unique: bool = ...) -> list[Any]: ... +def getclasstree(classes: list[type], unique: bool = False) -> list[Any]: ... def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> list[Any]: ... class Arguments(NamedTuple): @@ -436,15 +436,15 @@ class ArgInfo(NamedTuple): locals: dict[str, Any] def getargvalues(frame: FrameType) -> ArgInfo: ... -def formatannotation(annotation: object, base_module: str | None = ...) -> str: ... +def formatannotation(annotation: object, base_module: str | None = None) -> str: ... def formatannotationrelativeto(object: object) -> Callable[[object], str]: ... if sys.version_info < (3, 11): def formatargspec( args: list[str], - varargs: str | None = ..., - varkw: str | None = ..., - defaults: tuple[Any, ...] | None = ..., + varargs: str | None = None, + varkw: str | None = None, + defaults: tuple[Any, ...] | None = None, kwonlyargs: Sequence[str] | None = ..., kwonlydefaults: Mapping[str, Any] | None = ..., annotations: Mapping[str, Any] = ..., @@ -476,7 +476,7 @@ class ClosureVars(NamedTuple): unbound: AbstractSet[str] def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: ... -def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = ...) -> Any: ... +def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: ... # # The interpreter stack @@ -500,7 +500,7 @@ if sys.version_info >= (3, 11): code_context: list[str] | None, index: int | None, *, - positions: dis.Positions | None = ..., + positions: dis.Positions | None = None, ) -> Self: ... class _FrameInfo(NamedTuple): @@ -522,7 +522,7 @@ if sys.version_info >= (3, 11): code_context: list[str] | None, index: int | None, *, - positions: dis.Positions | None = ..., + positions: dis.Positions | None = None, ) -> Self: ... else: @@ -541,13 +541,13 @@ else: code_context: list[str] | None index: int | None # type: ignore[assignment] -def getframeinfo(frame: FrameType | TracebackType, context: int = ...) -> Traceback: ... -def getouterframes(frame: Any, context: int = ...) -> list[FrameInfo]: ... 
-def getinnerframes(tb: TracebackType, context: int = ...) -> list[FrameInfo]: ... +def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: ... +def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: ... +def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: ... def getlineno(frame: FrameType) -> int: ... def currentframe() -> FrameType | None: ... -def stack(context: int = ...) -> list[FrameInfo]: ... -def trace(context: int = ...) -> list[FrameInfo]: ... +def stack(context: int = 1) -> list[FrameInfo]: ... +def trace(context: int = 1) -> list[FrameInfo]: ... # # Fetching attributes statically diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index c1889300f981..6e1b4be77b07 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -61,7 +61,7 @@ class IOBase(metaclass=abc.ABCMeta): def isatty(self) -> bool: ... def readable(self) -> bool: ... read: Callable[..., Any] - def readlines(self, __hint: int = ...) -> list[bytes]: ... + def readlines(self, __hint: int = -1) -> list[bytes]: ... def seek(self, __offset: int, __whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... @@ -69,7 +69,7 @@ class IOBase(metaclass=abc.ABCMeta): def writable(self) -> bool: ... write: Callable[..., Any] def writelines(self, __lines: Iterable[ReadableBuffer]) -> None: ... - def readline(self, __size: int | None = ...) -> bytes: ... + def readline(self, __size: int | None = -1) -> bytes: ... def __del__(self) -> None: ... @property def closed(self) -> bool: ... @@ -79,7 +79,7 @@ class RawIOBase(IOBase): def readall(self) -> bytes: ... def readinto(self, __buffer: WriteableBuffer) -> int | None: ... def write(self, __b: ReadableBuffer) -> int | None: ... - def read(self, __size: int = ...) -> bytes | None: ... + def read(self, __size: int = -1) -> bytes | None: ... class BufferedIOBase(IOBase): raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations. @@ -99,7 +99,7 @@ class FileIO(RawIOBase, BinaryIO): @property def closefd(self) -> bool: ... def write(self, __b: ReadableBuffer) -> int: ... - def read(self, __size: int = ...) -> bytes: ... + def read(self, __size: int = -1) -> bytes: ... def __enter__(self: Self) -> Self: ... class BytesIO(BufferedIOBase, BinaryIO): @@ -111,12 +111,12 @@ class BytesIO(BufferedIOBase, BinaryIO): def __enter__(self: Self) -> Self: ... def getvalue(self) -> bytes: ... def getbuffer(self) -> memoryview: ... - def read1(self, __size: int | None = ...) -> bytes: ... + def read1(self, __size: int | None = -1) -> bytes: ... class BufferedReader(BufferedIOBase, BinaryIO): def __enter__(self: Self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... - def peek(self, __size: int = ...) -> bytes: ... + def peek(self, __size: int = 0) -> bytes: ... class BufferedWriter(BufferedIOBase, BinaryIO): def __enter__(self: Self) -> Self: ... @@ -125,7 +125,7 @@ class BufferedWriter(BufferedIOBase, BinaryIO): class BufferedRandom(BufferedReader, BufferedWriter): def __enter__(self: Self) -> Self: ... - def seek(self, __target: int, __whence: int = ...) -> int: ... # stubtest needs this + def seek(self, __target: int, __whence: int = 0) -> int: ... # stubtest needs this class BufferedRWPair(BufferedIOBase): def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = ...) -> None: ... @@ -141,7 +141,7 @@ class TextIOBase(IOBase): def write(self, __s: str) -> int: ... 
def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] def readline(self, __size: int = ...) -> str: ... # type: ignore[override] - def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] + def readlines(self, __hint: int = -1) -> list[str]: ... # type: ignore[override] def read(self, __size: int | None = ...) -> str: ... class TextIOWrapper(TextIOBase, TextIO): @@ -165,20 +165,20 @@ class TextIOWrapper(TextIOBase, TextIO): def reconfigure( self, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - line_buffering: bool | None = ..., - write_through: bool | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool | None = None, + write_through: bool | None = None, ) -> None: ... # These are inherited from TextIOBase, but must exist in the stub to satisfy mypy. def __enter__(self: Self) -> Self: ... def __iter__(self) -> Iterator[str]: ... # type: ignore[override] def __next__(self) -> str: ... # type: ignore[override] def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] - def readline(self, __size: int = ...) -> str: ... # type: ignore[override] - def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] - def seek(self, __cookie: int, __whence: int = ...) -> int: ... # stubtest needs this + def readline(self, __size: int = -1) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = -1) -> list[str]: ... # type: ignore[override] + def seek(self, __cookie: int, __whence: int = 0) -> int: ... # stubtest needs this class StringIO(TextIOWrapper): def __init__(self, initial_value: str | None = ..., newline: str | None = ...) -> None: ... @@ -190,7 +190,7 @@ class StringIO(TextIOWrapper): class IncrementalNewlineDecoder(codecs.IncrementalDecoder): def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ... - def decode(self, input: ReadableBuffer | str, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ... @property def newlines(self) -> str | tuple[str, ...] | None: ... def setstate(self, __state: tuple[bytes, int]) -> None: ... diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index 6580ba4f1ac4..1de945db5d30 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -16,7 +16,7 @@ _RawNetworkPart: TypeAlias = IPv4Network | IPv6Network | IPv4Interface | IPv6Int def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... def ip_network( - address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = ... + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True ) -> IPv4Network | IPv6Network: ... def ip_interface( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int] @@ -114,8 +114,8 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def prefixlen(self) -> int: ... def subnet_of(self: Self, other: Self) -> bool: ... def supernet_of(self: Self, other: Self) -> bool: ... - def subnets(self: Self, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> Iterator[Self]: ... - def supernet(self: Self, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> Self: ... 
+ def subnets(self: Self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... + def supernet(self: Self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... @property def with_hostmask(self) -> str: ... @property diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index 3cc1bd00de79..a16827a3adb8 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -111,7 +111,7 @@ class takewhile(Iterator[_T], Generic[_T]): def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... -def tee(__iterable: Iterable[_T], __n: int = ...) -> tuple[Iterator[_T], ...]: ... +def tee(__iterable: Iterable[_T], __n: int = 2) -> tuple[Iterator[_T], ...]: ... class zip_longest(Iterator[_T_co], Generic[_T_co]): # one iterable (fillvalue doesn't matter) diff --git a/mypy/typeshed/stdlib/json/__init__.pyi b/mypy/typeshed/stdlib/json/__init__.pyi index 73bb5e8b4c1a..63e9718ee151 100644 --- a/mypy/typeshed/stdlib/json/__init__.pyi +++ b/mypy/typeshed/stdlib/json/__init__.pyi @@ -10,52 +10,52 @@ __all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", " def dumps( obj: Any, *, - skipkeys: bool = ..., - ensure_ascii: bool = ..., - check_circular: bool = ..., - allow_nan: bool = ..., - cls: type[JSONEncoder] | None = ..., - indent: None | int | str = ..., - separators: tuple[str, str] | None = ..., - default: Callable[[Any], Any] | None = ..., - sort_keys: bool = ..., + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, **kwds: Any, ) -> str: ... def dump( obj: Any, fp: SupportsWrite[str], *, - skipkeys: bool = ..., - ensure_ascii: bool = ..., - check_circular: bool = ..., - allow_nan: bool = ..., - cls: type[JSONEncoder] | None = ..., - indent: None | int | str = ..., - separators: tuple[str, str] | None = ..., - default: Callable[[Any], Any] | None = ..., - sort_keys: bool = ..., + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, **kwds: Any, ) -> None: ... def loads( s: str | bytes | bytearray, *, - cls: type[JSONDecoder] | None = ..., - object_hook: Callable[[dict[Any, Any]], Any] | None = ..., - parse_float: Callable[[str], Any] | None = ..., - parse_int: Callable[[str], Any] | None = ..., - parse_constant: Callable[[str], Any] | None = ..., - object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, ) -> Any: ... 
def load( fp: SupportsRead[str | bytes], *, - cls: type[JSONDecoder] | None = ..., - object_hook: Callable[[dict[Any, Any]], Any] | None = ..., - parse_float: Callable[[str], Any] | None = ..., - parse_int: Callable[[str], Any] | None = ..., - parse_constant: Callable[[str], Any] | None = ..., - object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, ) -> Any: ... def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/json/decoder.pyi b/mypy/typeshed/stdlib/json/decoder.pyi index 2060cf17dd05..8debfe6cd65a 100644 --- a/mypy/typeshed/stdlib/json/decoder.pyi +++ b/mypy/typeshed/stdlib/json/decoder.pyi @@ -21,12 +21,12 @@ class JSONDecoder: def __init__( self, *, - object_hook: Callable[[dict[str, Any]], Any] | None = ..., - parse_float: Callable[[str], Any] | None = ..., - parse_int: Callable[[str], Any] | None = ..., - parse_constant: Callable[[str], Any] | None = ..., - strict: bool = ..., - object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = ..., + object_hook: Callable[[dict[str, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + strict: bool = True, + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, ) -> None: ... def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: ... # _w is undocumented - def raw_decode(self, s: str, idx: int = ...) -> tuple[Any, int]: ... + def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: ... diff --git a/mypy/typeshed/stdlib/json/encoder.pyi b/mypy/typeshed/stdlib/json/encoder.pyi index 0444ae477a96..0c0d366eb7a2 100644 --- a/mypy/typeshed/stdlib/json/encoder.pyi +++ b/mypy/typeshed/stdlib/json/encoder.pyi @@ -24,15 +24,15 @@ class JSONEncoder: def __init__( self, *, - skipkeys: bool = ..., - ensure_ascii: bool = ..., - check_circular: bool = ..., - allow_nan: bool = ..., - sort_keys: bool = ..., - indent: int | str | None = ..., - separators: tuple[str, str] | None = ..., - default: Callable[..., Any] | None = ..., + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + sort_keys: bool = False, + indent: int | str | None = None, + separators: tuple[str, str] | None = None, + default: Callable[..., Any] | None = None, ) -> None: ... def default(self, o: Any) -> Any: ... def encode(self, o: Any) -> str: ... - def iterencode(self, o: Any, _one_shot: bool = ...) -> Iterator[str]: ... + def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi index 45c9aeaa5691..9f6e4d6774ad 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -12,13 +12,13 @@ class Driver: grammar: Grammar logger: Logger convert: _Convert - def __init__(self, grammar: Grammar, convert: _Convert | None = ..., logger: Logger | None = ...) -> None: ... - def parse_tokens(self, tokens: Iterable[Any], debug: bool = ...) -> _NL: ... 
- def parse_stream_raw(self, stream: IO[str], debug: bool = ...) -> _NL: ... - def parse_stream(self, stream: IO[str], debug: bool = ...) -> _NL: ... - def parse_file(self, filename: StrPath, encoding: str | None = ..., debug: bool = ...) -> _NL: ... - def parse_string(self, text: str, debug: bool = ...) -> _NL: ... + def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... + def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> _NL: ... + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... + def parse_string(self, text: str, debug: bool = False) -> _NL: ... def load_grammar( - gt: str = ..., gp: str | None = ..., save: bool = ..., force: bool = ..., logger: Logger | None = ... + gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None ) -> Grammar: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi index 6a07c4a4ad48..51eb671f4236 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -20,8 +20,8 @@ class Parser: stack: list[tuple[_DFAS, int, _RawNode]] rootnode: _NL | None used_names: set[str] - def __init__(self, grammar: Grammar, convert: _Convert | None = ...) -> None: ... - def setup(self, start: int | None = ...) -> None: ... + def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: ... + def setup(self, start: int | None = None) -> None: ... def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... def classify(self, type: int, value: str | None, context: _Context) -> int: ... def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi index 84ee7ae98bd0..d346739d4d58 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi @@ -11,7 +11,7 @@ class ParserGenerator: stream: IO[str] generator: Iterator[_TokenInfo] first: dict[str, dict[str, int]] - def __init__(self, filename: StrPath, stream: IO[str] | None = ...) -> None: ... + def __init__(self, filename: StrPath, stream: IO[str] | None = None) -> None: ... def make_grammar(self) -> PgenGrammar: ... def make_first(self, c: PgenGrammar, name: str) -> dict[int, int]: ... def make_label(self, c: PgenGrammar, label: str) -> int: ... @@ -26,13 +26,13 @@ class ParserGenerator: def parse_alt(self) -> tuple[NFAState, NFAState]: ... def parse_item(self) -> tuple[NFAState, NFAState]: ... def parse_atom(self) -> tuple[NFAState, NFAState]: ... - def expect(self, type: int, value: Any | None = ...) -> str: ... + def expect(self, type: int, value: Any | None = None) -> str: ... def gettoken(self) -> None: ... def raise_error(self, msg: str, *args: Any) -> NoReturn: ... class NFAState: arcs: list[tuple[str | None, NFAState]] - def addarc(self, next: NFAState, label: str | None = ...) -> None: ... + def addarc(self, next: NFAState, label: str | None = None) -> None: ... class DFAState: nfaset: dict[NFAState, Any] @@ -43,4 +43,4 @@ class DFAState: def unifystate(self, old: DFAState, new: DFAState) -> None: ... def __eq__(self, other: DFAState) -> bool: ... 
# type: ignore[override] -def generate_grammar(filename: StrPath = ...) -> PgenGrammar: ... +def generate_grammar(filename: StrPath = "Grammar.txt") -> PgenGrammar: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pygram.pyi b/mypy/typeshed/stdlib/lib2to3/pygram.pyi index bf96a55c41b3..00fdbd1a124e 100644 --- a/mypy/typeshed/stdlib/lib2to3/pygram.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pygram.pyi @@ -1,3 +1,4 @@ +import sys from lib2to3.pgen2.grammar import Grammar class Symbols: @@ -110,4 +111,6 @@ class pattern_symbols(Symbols): python_grammar: Grammar python_grammar_no_print_statement: Grammar +if sys.version_info >= (3, 8): + python_grammar_no_print_and_exec_statement: Grammar pattern_grammar: Grammar diff --git a/mypy/typeshed/stdlib/lib2to3/pytree.pyi b/mypy/typeshed/stdlib/lib2to3/pytree.pyi index 4db9ab99ba44..5cf7db146e46 100644 --- a/mypy/typeshed/stdlib/lib2to3/pytree.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pytree.pyi @@ -43,9 +43,9 @@ class Node(Base): self, type: int, children: list[_NL], - context: Any | None = ..., - prefix: str | None = ..., - fixers_applied: list[Any] | None = ..., + context: Any | None = None, + prefix: str | None = None, + fixers_applied: list[Any] | None = None, ) -> None: ... def set_child(self, i: int, child: _NL) -> None: ... def insert_child(self, i: int, child: _NL) -> None: ... @@ -58,7 +58,7 @@ class Leaf(Base): value: str fixers_applied: list[Any] def __init__( - self, type: int, value: str, context: _Context | None = ..., prefix: str | None = ..., fixers_applied: list[Any] = ... + self, type: int, value: str, context: _Context | None = None, prefix: str | None = None, fixers_applied: list[Any] = ... ) -> None: ... def __unicode__(self) -> str: ... @@ -69,23 +69,23 @@ class BasePattern: content: str | None name: str | None def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns - def match(self, node: _NL, results: _Results | None = ...) -> bool: ... - def match_seq(self, nodes: list[_NL], results: _Results | None = ...) -> bool: ... + def match(self, node: _NL, results: _Results | None = None) -> bool: ... + def match_seq(self, nodes: list[_NL], results: _Results | None = None) -> bool: ... def generate_matches(self, nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... class LeafPattern(BasePattern): - def __init__(self, type: int | None = ..., content: str | None = ..., name: str | None = ...) -> None: ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class NodePattern(BasePattern): wildcards: bool - def __init__(self, type: int | None = ..., content: str | None = ..., name: str | None = ...) -> None: ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class WildcardPattern(BasePattern): min: int max: int - def __init__(self, content: str | None = ..., min: int = ..., max: int = ..., name: str | None = ...) -> None: ... + def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: ... class NegatedPattern(BasePattern): - def __init__(self, content: str | None = ...) -> None: ... + def __init__(self, content: str | None = None) -> None: ... def generate_matches(patterns: list[BasePattern], nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/refactor.pyi b/mypy/typeshed/stdlib/lib2to3/refactor.pyi index 3aaea0e519d9..f1d89679aee7 100644 --- a/mypy/typeshed/stdlib/lib2to3/refactor.pyi +++ b/mypy/typeshed/stdlib/lib2to3/refactor.pyi @@ -8,7 +8,7 @@ from .pgen2.grammar import Grammar _Driver: TypeAlias = Any # really lib2to3.driver.Driver _BottomMatcher: TypeAlias = Any # really lib2to3.btm_matcher.BottomMatcher -def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = ...) -> list[str]: ... +def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... def get_fixers_from_package(pkg_name: str) -> list[str]: ... class FixerError(Exception): ... @@ -33,25 +33,25 @@ class RefactoringTool: bmi_pre_order: list[Any] bmi_post_order: list[Any] def __init__( - self, fixer_names: Iterable[str], options: Mapping[str, Any] | None = ..., explicit: Container[str] | None = ... + self, fixer_names: Iterable[str], options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None ) -> None: ... def get_fixers(self) -> tuple[list[Any], list[Any]]: ... def log_error(self, msg: str, *args: Any, **kwds: Any) -> NoReturn: ... def log_message(self, msg: str, *args: Any) -> None: ... def log_debug(self, msg: str, *args: Any) -> None: ... def print_output(self, old_text: str, new_text: str, filename: str, equal): ... - def refactor(self, items: Iterable[str], write: bool = ..., doctests_only: bool = ...) -> None: ... - def refactor_dir(self, dir_name: str, write: bool = ..., doctests_only: bool = ...) -> None: ... + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... def _read_python_source(self, filename: str) -> tuple[str, str]: ... - def refactor_file(self, filename: str, write: bool = ..., doctests_only: bool = ...) -> None: ... + def refactor_file(self, filename: str, write: bool = False, doctests_only: bool = False) -> None: ... def refactor_string(self, data: str, name: str): ... - def refactor_stdin(self, doctests_only: bool = ...) -> None: ... + def refactor_stdin(self, doctests_only: bool = False) -> None: ... def refactor_tree(self, tree, name: str) -> bool: ... def traverse_by(self, fixers, traversal) -> None: ... def processed_file( - self, new_text: str, filename: str, old_text: str | None = ..., write: bool = ..., encoding: str | None = ... + self, new_text: str, filename: str, old_text: str | None = None, write: bool = False, encoding: str | None = None ) -> None: ... - def write_file(self, new_text: str, filename: str, old_text: str, encoding: str | None = ...) -> None: ... + def write_file(self, new_text: str, filename: str, old_text: str, encoding: str | None = None) -> None: ... PS1: ClassVar[str] PS2: ClassVar[str] def refactor_docstring(self, input: str, filename: str) -> str: ... @@ -68,4 +68,6 @@ class MultiprocessingUnsupported(Exception): ... class MultiprocessRefactoringTool(RefactoringTool): queue: Any | None output_lock: Any | None - def refactor(self, items: Iterable[str], write: bool = ..., doctests_only: bool = ..., num_processes: int = ...) -> None: ... + def refactor( + self, items: Iterable[str], write: bool = False, doctests_only: bool = False, num_processes: int = 1 + ) -> None: ... 
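# [Editor's illustration, not part of the patch] The refactor.pyi hunks above record
# that the RefactoringTool.refactor_*() methods default to write=False and
# doctests_only=False.  A minimal sketch (lib2to3 is deprecated; shown only to
# illustrate the defaults, nothing is written to disk):
from lib2to3.refactor import RefactoringTool, get_fixers_from_package

tool = RefactoringTool(get_fixers_from_package("lib2to3.fixes"))
tree = tool.refactor_string("print 'hello'\n", "<example>")
print(tree)   # the fix_print fixer rewrites this to: print('hello')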
diff --git a/mypy/typeshed/stdlib/linecache.pyi b/mypy/typeshed/stdlib/linecache.pyi index df54fd80aea7..8e317dd38990 100644 --- a/mypy/typeshed/stdlib/linecache.pyi +++ b/mypy/typeshed/stdlib/linecache.pyi @@ -15,9 +15,9 @@ class _SourceLoader(Protocol): cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented -def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = ...) -> str: ... +def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: ... def clearcache() -> None: ... -def getlines(filename: str, module_globals: _ModuleGlobals | None = ...) -> list[str]: ... -def checkcache(filename: str | None = ...) -> None: ... -def updatecache(filename: str, module_globals: _ModuleGlobals | None = ...) -> list[str]: ... +def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def checkcache(filename: str | None = None) -> None: ... +def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: ... diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi index 9a3ea65d1b8b..0b0dd9456e52 100644 --- a/mypy/typeshed/stdlib/locale.pyi +++ b/mypy/typeshed/stdlib/locale.pyi @@ -111,19 +111,19 @@ CHAR_MAX: int class Error(Exception): ... -def setlocale(category: int, locale: _str | Iterable[_str | None] | None = ...) -> _str: ... +def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... def localeconv() -> Mapping[_str, int | _str | list[int]]: ... def nl_langinfo(__key: int) -> _str: ... def getdefaultlocale(envvars: tuple[_str, ...] = ...) -> tuple[_str | None, _str | None]: ... def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... -def getpreferredencoding(do_setlocale: bool = ...) -> _str: ... +def getpreferredencoding(do_setlocale: bool = True) -> _str: ... def normalize(localename: _str) -> _str: ... def resetlocale(category: int = ...) -> None: ... def strcoll(__os1: _str, __os2: _str) -> int: ... def strxfrm(__string: _str) -> _str: ... -def format(percent: _str, value: float | Decimal, grouping: bool = ..., monetary: bool = ..., *additional: Any) -> _str: ... -def format_string(f: _str, val: Any, grouping: bool = ..., monetary: bool = ...) -> _str: ... -def currency(val: float | Decimal, symbol: bool = ..., grouping: bool = ..., international: bool = ...) -> _str: ... +def format(percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any) -> _str: ... +def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... +def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... def delocalize(string: _str) -> _str: ... def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... def atoi(string: _str) -> int: ... diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi index 575fd8f9ee4b..231700653a32 100644 --- a/mypy/typeshed/stdlib/logging/__init__.pyi +++ b/mypy/typeshed/stdlib/logging/__init__.pyi @@ -64,7 +64,7 @@ if sys.version_info >= (3, 11): _SysExcInfoType: TypeAlias = Union[tuple[type[BaseException], BaseException, TracebackType | None], tuple[None, None, None]] _ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException _ArgsType: TypeAlias = tuple[object, ...] 
| Mapping[str, object] -_FilterType: TypeAlias = Filter | Callable[[LogRecord], int] +_FilterType: TypeAlias = Filter | Callable[[LogRecord], bool] _Level: TypeAlias = int | str _FormatStyle: TypeAlias = Literal["%", "{", "$"] @@ -106,7 +106,7 @@ class Logger(Filterer): disabled: bool # undocumented root: ClassVar[RootLogger] # undocumented manager: Manager # undocumented - def __init__(self, name: str, level: _Level = ...) -> None: ... + def __init__(self, name: str, level: _Level = 0) -> None: ... def setLevel(self, level: _Level) -> None: ... def isEnabledFor(self, level: int) -> bool: ... def getEffectiveLevel(self) -> int: ... @@ -161,7 +161,7 @@ class Logger(Filterer): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., stacklevel: int = ..., extra: Mapping[str, object] | None = ..., @@ -190,10 +190,10 @@ class Logger(Filterer): level: int, msg: object, args: _ArgsType, - exc_info: _ExcInfoType | None = ..., - extra: Mapping[str, object] | None = ..., - stack_info: bool = ..., - stacklevel: int = ..., + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + stacklevel: int = 1, ) -> None: ... # undocumented else: def debug( @@ -257,7 +257,7 @@ class Logger(Filterer): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., extra: Mapping[str, object] | None = ..., ) -> None: ... @@ -266,17 +266,17 @@ class Logger(Filterer): level: int, msg: object, args: _ArgsType, - exc_info: _ExcInfoType | None = ..., - extra: Mapping[str, object] | None = ..., - stack_info: bool = ..., + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, ) -> None: ... # undocumented fatal = critical def addHandler(self, hdlr: Handler) -> None: ... def removeHandler(self, hdlr: Handler) -> None: ... if sys.version_info >= (3, 8): - def findCaller(self, stack_info: bool = ..., stacklevel: int = ...) -> tuple[str, int, str, str | None]: ... + def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: ... else: - def findCaller(self, stack_info: bool = ...) -> tuple[str, int, str, str | None]: ... + def findCaller(self, stack_info: bool = False) -> tuple[str, int, str, str | None]: ... def handle(self, record: LogRecord) -> None: ... def makeRecord( @@ -288,9 +288,9 @@ class Logger(Filterer): msg: object, args: _ArgsType, exc_info: _SysExcInfoType | None, - func: str | None = ..., - extra: Mapping[str, object] | None = ..., - sinfo: str | None = ..., + func: str | None = None, + extra: Mapping[str, object] | None = None, + sinfo: str | None = None, ) -> LogRecord: ... def hasHandlers(self) -> bool: ... def callHandlers(self, record: LogRecord) -> None: ... # undocumented @@ -309,7 +309,7 @@ class Handler(Filterer): formatter: Formatter | None # undocumented lock: threading.Lock | None # undocumented name: str | None # undocumented - def __init__(self, level: _Level = ...) -> None: ... + def __init__(self, level: _Level = 0) -> None: ... def get_name(self) -> str: ... # undocumented def set_name(self, name: str) -> None: ... # undocumented def createLock(self) -> None: ... 
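# [Editor's illustration, not part of the patch] The _FilterType change above
# (Callable[[LogRecord], int] -> Callable[[LogRecord], bool]) matches how the logging
# module treats plain callables registered as filters: their return value is used as
# a truth value.  A minimal sketch:
import logging

def only_warnings(record: logging.LogRecord) -> bool:
    # Drop anything below WARNING.
    return record.levelno >= logging.WARNING

handler = logging.StreamHandler()
handler.addFilter(only_warnings)   # a bare callable is accepted as a filter
logging.getLogger("demo").addHandler(handler)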
@@ -338,22 +338,22 @@ class Formatter: if sys.version_info >= (3, 10): def __init__( self, - fmt: str | None = ..., - datefmt: str | None = ..., - style: _FormatStyle = ..., - validate: bool = ..., + fmt: str | None = None, + datefmt: str | None = None, + style: _FormatStyle = "%", + validate: bool = True, *, - defaults: Mapping[str, Any] | None = ..., + defaults: Mapping[str, Any] | None = None, ) -> None: ... elif sys.version_info >= (3, 8): def __init__( - self, fmt: str | None = ..., datefmt: str | None = ..., style: _FormatStyle = ..., validate: bool = ... + self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%", validate: bool = True ) -> None: ... else: - def __init__(self, fmt: str | None = ..., datefmt: str | None = ..., style: _FormatStyle = ...) -> None: ... + def __init__(self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%") -> None: ... def format(self, record: LogRecord) -> str: ... - def formatTime(self, record: LogRecord, datefmt: str | None = ...) -> str: ... + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: ... def formatException(self, ei: _SysExcInfoType) -> str: ... def formatMessage(self, record: LogRecord) -> str: ... # undocumented def formatStack(self, stack_info: str) -> str: ... @@ -361,7 +361,7 @@ class Formatter: class BufferingFormatter: linefmt: Formatter - def __init__(self, linefmt: Formatter | None = ...) -> None: ... + def __init__(self, linefmt: Formatter | None = None) -> None: ... def formatHeader(self, records: Sequence[LogRecord]) -> str: ... def formatFooter(self, records: Sequence[LogRecord]) -> str: ... def format(self, records: Sequence[LogRecord]) -> str: ... @@ -369,7 +369,7 @@ class BufferingFormatter: class Filter: name: str # undocumented nlen: int # undocumented - def __init__(self, name: str = ...) -> None: ... + def __init__(self, name: str = "") -> None: ... def filter(self, record: LogRecord) -> bool: ... class LogRecord: @@ -407,8 +407,8 @@ class LogRecord: msg: object, args: _ArgsType | None, exc_info: _SysExcInfoType | None, - func: str | None = ..., - sinfo: str | None = ..., + func: str | None = None, + sinfo: str | None = None, ) -> None: ... def getMessage(self) -> str: ... # Allows setting contextual information on LogRecord objects as per the docs, see #7833 @@ -421,7 +421,7 @@ class LoggerAdapter(Generic[_L]): manager: Manager # undocumented if sys.version_info >= (3, 10): extra: Mapping[str, object] | None - def __init__(self, logger: _L, extra: Mapping[str, object] | None = ...) -> None: ... + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: ... else: extra: Mapping[str, object] def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... 
@@ -482,7 +482,7 @@ class LoggerAdapter(Generic[_L]): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., stacklevel: int = ..., extra: Mapping[str, object] | None = ..., @@ -559,7 +559,7 @@ class LoggerAdapter(Generic[_L]): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., extra: Mapping[str, object] | None = ..., **kwargs: object, @@ -593,16 +593,16 @@ class LoggerAdapter(Generic[_L]): level: int, msg: object, args: _ArgsType, - exc_info: _ExcInfoType | None = ..., - extra: Mapping[str, object] | None = ..., - stack_info: bool = ..., + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, ) -> None: ... # undocumented @property def name(self) -> str: ... # undocumented if sys.version_info >= (3, 11): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -def getLogger(name: str | None = ...) -> Logger: ... +def getLogger(name: str | None = None) -> Logger: ... def getLoggerClass() -> type[Logger]: ... def getLogRecordFactory() -> Callable[..., LogRecord]: ... @@ -658,7 +658,7 @@ if sys.version_info >= (3, 8): def exception( msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., stacklevel: int = ..., extra: Mapping[str, object] | None = ..., @@ -693,7 +693,11 @@ else: msg: object, *args: object, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Mapping[str, object] | None = ... ) -> None: ... def exception( - msg: object, *args: object, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Mapping[str, object] | None = ... + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = ..., + extra: Mapping[str, object] | None = ..., ) -> None: ... def log( level: int, @@ -706,7 +710,7 @@ else: fatal = critical -def disable(level: int = ...) -> None: ... +def disable(level: int = 50) -> None: ... def addLevelName(level: int, levelName: str) -> None: ... def getLevelName(level: _Level) -> Any: ... @@ -771,7 +775,7 @@ class StreamHandler(Handler, Generic[_StreamT]): stream: _StreamT # undocumented terminator: str @overload - def __init__(self: StreamHandler[TextIO], stream: None = ...) -> None: ... + def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: ... @overload def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... def setStream(self, stream: _StreamT) -> _StreamT | None: ... @@ -786,10 +790,10 @@ class FileHandler(StreamHandler[TextIOWrapper]): if sys.version_info >= (3, 9): errors: str | None # undocumented def __init__( - self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ..., errors: str | None = ... + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... else: - def __init__(self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ...) -> None: ... + def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... def _open(self) -> TextIOWrapper: ... # undocumented @@ -815,7 +819,7 @@ class PercentStyle: # undocumented validation_pattern: Pattern[str] _fmt: str if sys.version_info >= (3, 10): - def __init__(self, fmt: str, *, defaults: Mapping[str, Any] | None = ...) -> None: ... 
+ def __init__(self, fmt: str, *, defaults: Mapping[str, Any] | None = None) -> None: ... else: def __init__(self, fmt: str) -> None: ... diff --git a/mypy/typeshed/stdlib/logging/config.pyi b/mypy/typeshed/stdlib/logging/config.pyi index 12e222680d2e..f76f655a6196 100644 --- a/mypy/typeshed/stdlib/logging/config.pyi +++ b/mypy/typeshed/stdlib/logging/config.pyi @@ -49,18 +49,18 @@ def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ... if sys.version_info >= (3, 10): def fileConfig( fname: StrOrBytesPath | IO[str] | RawConfigParser, - defaults: dict[str, str] | None = ..., - disable_existing_loggers: bool = ..., - encoding: str | None = ..., + defaults: dict[str, str] | None = None, + disable_existing_loggers: bool = True, + encoding: str | None = None, ) -> None: ... else: def fileConfig( fname: StrOrBytesPath | IO[str] | RawConfigParser, - defaults: dict[str, str] | None = ..., - disable_existing_loggers: bool = ..., + defaults: dict[str, str] | None = None, + disable_existing_loggers: bool = True, ) -> None: ... def valid_ident(s: str) -> Literal[True]: ... # undocumented -def listen(port: int = ..., verify: Callable[[bytes], bytes | None] | None = ...) -> Thread: ... +def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: ... def stopListening() -> None: ... diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index f01c67d13fe9..7e0bfd705895 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -22,10 +22,10 @@ class WatchedFileHandler(FileHandler): ino: int # undocumented if sys.version_info >= (3, 9): def __init__( - self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ..., errors: str | None = ... + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... else: - def __init__(self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ...) -> None: ... + def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... def _statstream(self) -> None: ... # undocumented def reopenIfNeeded(self) -> None: ... @@ -35,10 +35,10 @@ class BaseRotatingHandler(FileHandler): rotator: Callable[[str, str], None] | None if sys.version_info >= (3, 9): def __init__( - self, filename: StrPath, mode: str, encoding: str | None = ..., delay: bool = ..., errors: str | None = ... + self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... else: - def __init__(self, filename: StrPath, mode: str, encoding: str | None = ..., delay: bool = ...) -> None: ... + def __init__(self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False) -> None: ... def rotation_filename(self, default_name: str) -> str: ... def rotate(self, source: str, dest: str) -> None: ... @@ -50,22 +50,22 @@ class RotatingFileHandler(BaseRotatingHandler): def __init__( self, filename: StrPath, - mode: str = ..., - maxBytes: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., - errors: str | None = ..., + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + errors: str | None = None, ) -> None: ... 
else: def __init__( self, filename: StrPath, - mode: str = ..., - maxBytes: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, ) -> None: ... def doRollover(self) -> None: ... @@ -85,26 +85,26 @@ class TimedRotatingFileHandler(BaseRotatingHandler): def __init__( self, filename: StrPath, - when: str = ..., - interval: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., - utc: bool = ..., - atTime: datetime.time | None = ..., - errors: str | None = ..., + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, + errors: str | None = None, ) -> None: ... else: def __init__( self, filename: StrPath, - when: str = ..., - interval: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., - utc: bool = ..., - atTime: datetime.time | None = ..., + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, ) -> None: ... def doRollover(self) -> None: ... @@ -123,7 +123,7 @@ class SocketHandler(Handler): retryFactor: float # undocumented retryMax: float # undocumented def __init__(self, host: str, port: int | None) -> None: ... - def makeSocket(self, timeout: float = ...) -> socket: ... # timeout is undocumented + def makeSocket(self, timeout: float = 1) -> socket: ... # timeout is undocumented def makePickle(self, record: LogRecord) -> bytes: ... def send(self, s: ReadableBuffer) -> None: ... def createSocket(self) -> None: ... @@ -177,7 +177,7 @@ class SysLogHandler(Handler): priority_names: ClassVar[dict[str, int]] # undocumented facility_names: ClassVar[dict[str, int]] # undocumented priority_map: ClassVar[dict[str, str]] # undocumented - def __init__(self, address: tuple[str, int] | str = ..., facility: int = ..., socktype: SocketKind | None = ...) -> None: ... + def __init__(self, address: tuple[str, int] | str = ..., facility: int = 1, socktype: SocketKind | None = None) -> None: ... if sys.version_info >= (3, 11): def createSocket(self) -> None: ... @@ -185,7 +185,7 @@ class SysLogHandler(Handler): def mapPriority(self, levelName: str) -> str: ... class NTEventLogHandler(Handler): - def __init__(self, appname: str, dllname: str | None = ..., logtype: str = ...) -> None: ... + def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... def getEventCategory(self, record: LogRecord) -> int: ... # TODO correct return value? def getEventType(self, record: LogRecord) -> int: ... @@ -208,9 +208,9 @@ class SMTPHandler(Handler): fromaddr: str, toaddrs: str | list[str], subject: str, - credentials: tuple[str, str] | None = ..., - secure: tuple[()] | tuple[str] | tuple[str, str] | None = ..., - timeout: float = ..., + credentials: tuple[str, str] | None = None, + secure: tuple[()] | tuple[str] | tuple[str, str] | None = None, + timeout: float = 5.0, ) -> None: ... def getSubject(self, record: LogRecord) -> str: ... @@ -224,7 +224,7 @@ class MemoryHandler(BufferingHandler): flushLevel: int # undocumented target: Handler | None # undocumented flushOnClose: bool # undocumented - def __init__(self, capacity: int, flushLevel: int = ..., target: Handler | None = ..., flushOnClose: bool = ...) -> None: ... 
+ def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: ... def setTarget(self, target: Handler | None) -> None: ... class HTTPHandler(Handler): @@ -238,10 +238,10 @@ class HTTPHandler(Handler): self, host: str, url: str, - method: str = ..., - secure: bool = ..., - credentials: tuple[str, str] | None = ..., - context: ssl.SSLContext | None = ..., + method: str = "GET", + secure: bool = False, + credentials: tuple[str, str] | None = None, + context: ssl.SSLContext | None = None, ) -> None: ... def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: ... if sys.version_info >= (3, 9): @@ -257,7 +257,7 @@ class QueueListener: handlers: tuple[Handler, ...] # undocumented respect_handler_level: bool # undocumented queue: SimpleQueue[Any] | Queue[Any] # undocumented - def __init__(self, queue: SimpleQueue[Any] | Queue[Any], *handlers: Handler, respect_handler_level: bool = ...) -> None: ... + def __init__(self, queue: SimpleQueue[Any] | Queue[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ... def dequeue(self, block: bool) -> LogRecord: ... def prepare(self, record: LogRecord) -> Any: ... def start(self) -> None: ... diff --git a/mypy/typeshed/stdlib/lzma.pyi b/mypy/typeshed/stdlib/lzma.pyi index 9d75c627f76d..2feb28a8e743 100644 --- a/mypy/typeshed/stdlib/lzma.pyi +++ b/mypy/typeshed/stdlib/lzma.pyi @@ -83,7 +83,7 @@ PRESET_EXTREME: int # v big number @final class LZMADecompressor: def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ... - def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property def check(self) -> int: ... @property @@ -107,91 +107,91 @@ class LZMAError(Exception): ... class LZMAFile(io.BufferedIOBase, IO[bytes]): def __init__( self, - filename: _PathOrFile | None = ..., - mode: str = ..., + filename: _PathOrFile | None = None, + mode: str = "r", *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, ) -> None: ... def __enter__(self: Self) -> Self: ... - def peek(self, size: int = ...) -> bytes: ... - def read(self, size: int | None = ...) -> bytes: ... - def read1(self, size: int = ...) -> bytes: ... - def readline(self, size: int | None = ...) -> bytes: ... + def peek(self, size: int = -1) -> bytes: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = -1) -> bytes: ... def write(self, data: ReadableBuffer) -> int: ... - def seek(self, offset: int, whence: int = ...) -> int: ... + def seek(self, offset: int, whence: int = 0) -> int: ... @overload def open( filename: _PathOrFile, - mode: Literal["r", "rb"] = ..., + mode: Literal["r", "rb"] = "rb", *, - format: int | None = ..., - check: Literal[-1] = ..., - preset: None = ..., - filters: _FilterChain | None = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> LZMAFile: ... 
@overload def open( filename: _PathOrFile, mode: _OpenBinaryWritingMode, *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> LZMAFile: ... @overload def open( filename: StrOrBytesPath, mode: Literal["rt"], *, - format: int | None = ..., - check: Literal[-1] = ..., - preset: None = ..., - filters: _FilterChain | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: StrOrBytesPath, mode: _OpenTextWritingMode, *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: _PathOrFile, mode: str, *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> LZMAFile | TextIO: ... def compress( - data: ReadableBuffer, format: int = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... + data: ReadableBuffer, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None ) -> bytes: ... def decompress( - data: ReadableBuffer, format: int = ..., memlimit: int | None = ..., filters: _FilterChain | None = ... + data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None ) -> bytes: ... def is_check_supported(__check_id: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/mailbox.pyi b/mypy/typeshed/stdlib/mailbox.pyi index 29cea5cadbb0..2fe9060e7b7c 100644 --- a/mypy/typeshed/stdlib/mailbox.pyi +++ b/mypy/typeshed/stdlib/mailbox.pyi @@ -50,9 +50,9 @@ class Mailbox(Generic[_MessageT]): _path: str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented @overload - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: ... @overload - def __init__(self, path: StrPath, factory: None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: None = None, create: bool = True) -> None: ... @abstractmethod def add(self, message: _MessageData) -> str: ... @abstractmethod @@ -62,7 +62,7 @@ class Mailbox(Generic[_MessageT]): @abstractmethod def __setitem__(self, key: str, message: _MessageData) -> None: ... 
@overload - def get(self, key: str, default: None = ...) -> _MessageT | None: ... + def get(self, key: str, default: None = None) -> _MessageT | None: ... @overload def get(self, key: str, default: _T) -> _MessageT | _T: ... def __getitem__(self, key: str) -> _MessageT: ... @@ -88,11 +88,11 @@ class Mailbox(Generic[_MessageT]): def __len__(self) -> int: ... def clear(self) -> None: ... @overload - def pop(self, key: str, default: None = ...) -> _MessageT | None: ... + def pop(self, key: str, default: None = None) -> _MessageT | None: ... @overload - def pop(self, key: str, default: _T = ...) -> _MessageT | _T: ... + def pop(self, key: str, default: _T) -> _MessageT | _T: ... def popitem(self) -> tuple[str, _MessageT]: ... - def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = ...) -> None: ... + def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: ... @abstractmethod def flush(self) -> None: ... @abstractmethod @@ -105,10 +105,9 @@ class Mailbox(Generic[_MessageT]): def __class_getitem__(cls, item: Any) -> GenericAlias: ... class Maildir(Mailbox[MaildirMessage]): - colon: str def __init__( - self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = ..., create: bool = ... + self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True ) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... @@ -144,18 +143,18 @@ class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): class _mboxMMDF(_singlefileMailbox[_MessageT]): def get_message(self, key: str) -> _MessageT: ... - def get_file(self, key: str, from_: bool = ...) -> _PartialFile[bytes]: ... - def get_bytes(self, key: str, from_: bool = ...) -> bytes: ... - def get_string(self, key: str, from_: bool = ...) -> str: ... + def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: ... + def get_bytes(self, key: str, from_: bool = False) -> bytes: ... + def get_string(self, key: str, from_: bool = False) -> str: ... class mbox(_mboxMMDF[mboxMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: ... class MMDF(_mboxMMDF[MMDFMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: ... class MH(Mailbox[MHMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... def __setitem__(self, key: str, message: _MessageData) -> None: ... @@ -178,14 +177,14 @@ class MH(Mailbox[MHMessage]): def pack(self) -> None: ... class Babyl(_singlefileMailbox[BabylMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: ... 
def get_message(self, key: str) -> BabylMessage: ... def get_bytes(self, key: str) -> bytes: ... def get_file(self, key: str) -> IO[bytes]: ... def get_labels(self) -> list[str]: ... class Message(email.message.Message): - def __init__(self, message: _MessageData | None = ...) -> None: ... + def __init__(self, message: _MessageData | None = None) -> None: ... class MaildirMessage(Message): def get_subdir(self) -> str: ... @@ -201,7 +200,7 @@ class MaildirMessage(Message): class _mboxMMDFMessage(Message): def get_from(self) -> str: ... - def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = ...) -> None: ... + def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: ... def get_flags(self) -> str: ... def set_flags(self, flags: Iterable[str]) -> None: ... def add_flag(self, flag: str) -> None: ... @@ -227,14 +226,14 @@ class BabylMessage(Message): class MMDFMessage(_mboxMMDFMessage): ... class _ProxyFile(Generic[AnyStr]): - def __init__(self, f: IO[AnyStr], pos: int | None = ...) -> None: ... - def read(self, size: int | None = ...) -> AnyStr: ... - def read1(self, size: int | None = ...) -> AnyStr: ... - def readline(self, size: int | None = ...) -> AnyStr: ... - def readlines(self, sizehint: int | None = ...) -> list[AnyStr]: ... + def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: ... + def read(self, size: int | None = None) -> AnyStr: ... + def read1(self, size: int | None = None) -> AnyStr: ... + def readline(self, size: int | None = None) -> AnyStr: ... + def readlines(self, sizehint: int | None = None) -> list[AnyStr]: ... def __iter__(self) -> Iterator[AnyStr]: ... def tell(self) -> int: ... - def seek(self, offset: int, whence: int = ...) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... def close(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... @@ -248,7 +247,7 @@ class _ProxyFile(Generic[AnyStr]): def __class_getitem__(cls, item: Any) -> GenericAlias: ... class _PartialFile(_ProxyFile[AnyStr]): - def __init__(self, f: IO[AnyStr], start: int | None = ..., stop: int | None = ...) -> None: ... + def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: ... class Error(Exception): ... class NoSuchMailboxError(Error): ... diff --git a/mypy/typeshed/stdlib/mailcap.pyi b/mypy/typeshed/stdlib/mailcap.pyi index e1637ad6e7be..5905f5826bf7 100644 --- a/mypy/typeshed/stdlib/mailcap.pyi +++ b/mypy/typeshed/stdlib/mailcap.pyi @@ -6,6 +6,6 @@ _Cap: TypeAlias = dict[str, str | int] __all__ = ["getcaps", "findmatch"] def findmatch( - caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = ..., filename: str = ..., plist: Sequence[str] = ... + caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = ... ) -> tuple[str | None, _Cap | None]: ... def getcaps() -> dict[str, list[_Cap]]: ... diff --git a/mypy/typeshed/stdlib/marshal.pyi b/mypy/typeshed/stdlib/marshal.pyi index d46d9c10483d..da5d1a95a6f6 100644 --- a/mypy/typeshed/stdlib/marshal.pyi +++ b/mypy/typeshed/stdlib/marshal.pyi @@ -27,7 +27,7 @@ _Marshallable: TypeAlias = Union[ ReadableBuffer, ] -def dump(__value: _Marshallable, __file: SupportsWrite[bytes], __version: int = ...) -> None: ... 
+def dump(__value: _Marshallable, __file: SupportsWrite[bytes], __version: int = 4) -> None: ... def load(__file: SupportsRead[bytes]) -> Any: ... -def dumps(__value: _Marshallable, __version: int = ...) -> bytes: ... +def dumps(__value: _Marshallable, __version: int = 4) -> bytes: ... def loads(__bytes: ReadableBuffer) -> Any: ... diff --git a/mypy/typeshed/stdlib/math.pyi b/mypy/typeshed/stdlib/math.pyi index ca30acd7e97d..231964f397db 100644 --- a/mypy/typeshed/stdlib/math.pyi +++ b/mypy/typeshed/stdlib/math.pyi @@ -91,8 +91,8 @@ def isclose( a: _SupportsFloatOrIndex, b: _SupportsFloatOrIndex, *, - rel_tol: _SupportsFloatOrIndex = ..., - abs_tol: _SupportsFloatOrIndex = ..., + rel_tol: _SupportsFloatOrIndex = 1e-09, + abs_tol: _SupportsFloatOrIndex = 0.0, ) -> bool: ... def isinf(__x: _SupportsFloatOrIndex) -> bool: ... def isfinite(__x: _SupportsFloatOrIndex) -> bool: ... @@ -116,15 +116,15 @@ if sys.version_info >= (3, 9): def nextafter(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): - def perm(__n: SupportsIndex, __k: SupportsIndex | None = ...) -> int: ... + def perm(__n: SupportsIndex, __k: SupportsIndex | None = None) -> int: ... def pow(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): @overload - def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = ...) -> int: ... # type: ignore[misc] + def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = 1) -> int: ... # type: ignore[misc] @overload - def prod(__iterable: Iterable[_SupportsFloatOrIndex], *, start: _SupportsFloatOrIndex = ...) -> float: ... + def prod(__iterable: Iterable[_SupportsFloatOrIndex], *, start: _SupportsFloatOrIndex = 1) -> float: ... def radians(__x: _SupportsFloatOrIndex) -> float: ... def remainder(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... diff --git a/mypy/typeshed/stdlib/mimetypes.pyi b/mypy/typeshed/stdlib/mimetypes.pyi index c2b6ff20281a..fd3908680009 100644 --- a/mypy/typeshed/stdlib/mimetypes.pyi +++ b/mypy/typeshed/stdlib/mimetypes.pyi @@ -20,16 +20,16 @@ __all__ = [ ] if sys.version_info >= (3, 8): - def guess_type(url: StrPath, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... else: - def guess_type(url: str, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(url: str, strict: bool = True) -> tuple[str | None, str | None]: ... -def guess_all_extensions(type: str, strict: bool = ...) -> list[str]: ... -def guess_extension(type: str, strict: bool = ...) -> str | None: ... -def init(files: Sequence[str] | None = ...) -> None: ... +def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... +def guess_extension(type: str, strict: bool = True) -> str | None: ... +def init(files: Sequence[str] | None = None) -> None: ... def read_mime_types(file: str) -> dict[str, str] | None: ... -def add_type(type: str, ext: str, strict: bool = ...) -> None: ... +def add_type(type: str, ext: str, strict: bool = True) -> None: ... inited: bool knownfiles: list[str] @@ -43,15 +43,15 @@ class MimeTypes: encodings_map: dict[str, str] types_map: tuple[dict[str, str], dict[str, str]] types_map_inv: tuple[dict[str, str], dict[str, str]] - def __init__(self, filenames: tuple[str, ...] = ..., strict: bool = ...) -> None: ... - def guess_extension(self, type: str, strict: bool = ...) -> str | None: ... 
+ def __init__(self, filenames: tuple[str, ...] = ..., strict: bool = True) -> None: ... + def guess_extension(self, type: str, strict: bool = True) -> str | None: ... if sys.version_info >= (3, 8): - def guess_type(self, url: StrPath, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... else: - def guess_type(self, url: str, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(self, url: str, strict: bool = True) -> tuple[str | None, str | None]: ... - def guess_all_extensions(self, type: str, strict: bool = ...) -> list[str]: ... - def read(self, filename: str, strict: bool = ...) -> None: ... - def readfp(self, fp: IO[str], strict: bool = ...) -> None: ... + def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... + def read(self, filename: str, strict: bool = True) -> None: ... + def readfp(self, fp: IO[str], strict: bool = True) -> None: ... if sys.platform == "win32": - def read_windows_registry(self, strict: bool = ...) -> None: ... + def read_windows_registry(self, strict: bool = True) -> None: ... diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 30084b85bc51..273cd0c6f4d4 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer, Self, Unused from collections.abc import Iterable, Iterator, Sized from typing import NoReturn, overload @@ -74,7 +74,7 @@ class mmap(Iterable[int], Sized): # so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[int]: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... if sys.version_info >= (3, 8) and sys.platform != "win32": MADV_NORMAL: int diff --git a/mypy/typeshed/stdlib/modulefinder.pyi b/mypy/typeshed/stdlib/modulefinder.pyi index caed7efadccc..6f1917644b06 100644 --- a/mypy/typeshed/stdlib/modulefinder.pyi +++ b/mypy/typeshed/stdlib/modulefinder.pyi @@ -20,10 +20,9 @@ replacePackageMap: dict[str, str] # undocumented def ReplacePackage(oldname: str, newname: str) -> None: ... class Module: # undocumented - def __init__(self, name: str, file: str | None = ..., path: str | None = ...) -> None: ... + def __init__(self, name: str, file: str | None = None, path: str | None = None) -> None: ... class ModuleFinder: - modules: dict[str, Module] path: list[str] # undocumented badmodules: dict[str, dict[str, int]] # undocumented @@ -35,16 +34,16 @@ class ModuleFinder: if sys.version_info >= (3, 8): def __init__( self, - path: list[str] | None = ..., - debug: int = ..., - excludes: Container[str] | None = ..., - replace_paths: Sequence[tuple[str, str]] | None = ..., + path: list[str] | None = None, + debug: int = 0, + excludes: Container[str] | None = None, + replace_paths: Sequence[tuple[str, str]] | None = None, ) -> None: ... else: def __init__( self, - path: list[str] | None = ..., - debug: int = ..., + path: list[str] | None = None, + debug: int = 0, excludes: Container[str] = ..., replace_paths: Sequence[tuple[str, str]] = ..., ) -> None: ... @@ -55,12 +54,12 @@ class ModuleFinder: def run_script(self, pathname: str) -> None: ... def load_file(self, pathname: str) -> None: ... # undocumented def import_hook( - self, name: str, caller: Module | None = ..., fromlist: list[str] | None = ..., level: int = ... 
+ self, name: str, caller: Module | None = None, fromlist: list[str] | None = None, level: int = -1 ) -> Module | None: ... # undocumented - def determine_parent(self, caller: Module | None, level: int = ...) -> Module | None: ... # undocumented + def determine_parent(self, caller: Module | None, level: int = -1) -> Module | None: ... # undocumented def find_head_package(self, parent: Module, name: str) -> tuple[Module, str]: ... # undocumented def load_tail(self, q: Module, tail: str) -> Module: ... # undocumented - def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = ...) -> None: ... # undocumented + def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = 0) -> None: ... # undocumented def find_all_submodules(self, m: Module) -> Iterable[str]: ... # undocumented def import_module(self, partname: str, fqname: str, parent: Module) -> Module | None: ... # undocumented def load_module(self, fqname: str, fp: IO[str], pathname: str, file_info: tuple[str, str, str]) -> Module: ... # undocumented @@ -69,7 +68,7 @@ class ModuleFinder: def load_package(self, fqname: str, pathname: str) -> Module: ... # undocumented def add_module(self, fqname: str) -> Module: ... # undocumented def find_module( - self, name: str, path: str | None, parent: Module | None = ... + self, name: str, path: str | None, parent: Module | None = None ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented def report(self) -> None: ... def any_missing(self) -> list[str]: ... # undocumented diff --git a/mypy/typeshed/stdlib/msilib/__init__.pyi b/mypy/typeshed/stdlib/msilib/__init__.pyi index 0e18350b226e..9f7367d152ba 100644 --- a/mypy/typeshed/stdlib/msilib/__init__.pyi +++ b/mypy/typeshed/stdlib/msilib/__init__.pyi @@ -24,7 +24,6 @@ if sys.platform == "win32": knownbits: Literal[0x3FFF] class Table: - name: str fields: list[tuple[int, str, int]] def __init__(self, name: str) -> None: ... @@ -50,7 +49,6 @@ if sys.platform == "win32": def gen_uuid() -> str: ... class CAB: - name: str files: list[tuple[str, str]] filenames: set[str] @@ -62,7 +60,6 @@ if sys.platform == "win32": _directories: set[str] class Directory: - db: _Database cab: CAB basedir: str @@ -82,28 +79,26 @@ if sys.platform == "win32": physical: str, _logical: str, default: str, - componentflags: int | None = ..., + componentflags: int | None = None, ) -> None: ... def start_component( self, - component: str | None = ..., - feature: Feature | None = ..., - flags: int | None = ..., - keyfile: str | None = ..., - uuid: str | None = ..., + component: str | None = None, + feature: Feature | None = None, + flags: int | None = None, + keyfile: str | None = None, + uuid: str | None = None, ) -> None: ... def make_short(self, file: str) -> str: ... - def add_file(self, file: str, src: str | None = ..., version: str | None = ..., language: str | None = ...) -> str: ... - def glob(self, pattern: str, exclude: Container[str] | None = ...) -> list[str]: ... + def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: ... + def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: ... def remove_pyc(self) -> None: ... class Binary: - name: str def __init__(self, fname: str) -> None: ... 
class Feature: - id: str def __init__( self, @@ -112,31 +107,28 @@ if sys.platform == "win32": title: str, desc: str, display: int, - level: int = ..., - parent: Feature | None = ..., - directory: str | None = ..., - attributes: int = ..., + level: int = 1, + parent: Feature | None = None, + directory: str | None = None, + attributes: int = 0, ) -> None: ... def set_current(self) -> None: ... class Control: - dlg: Dialog name: str def __init__(self, dlg: Dialog, name: str) -> None: ... - def event(self, event: str, argument: str, condition: str = ..., ordering: int | None = ...) -> None: ... + def event(self, event: str, argument: str, condition: str = "1", ordering: int | None = None) -> None: ... def mapping(self, event: str, attribute: str) -> None: ... def condition(self, action: str, condition: str) -> None: ... class RadioButtonGroup(Control): - property: str index: int def __init__(self, dlg: Dialog, name: str, property: str) -> None: ... - def add(self, name: str, x: int, y: int, w: int, h: int, text: str, value: str | None = ...) -> None: ... + def add(self, name: str, x: int, y: int, w: int, h: int, text: str, value: str | None = None) -> None: ... class Dialog: - db: _Database name: str x: int diff --git a/mypy/typeshed/stdlib/msilib/sequence.pyi b/mypy/typeshed/stdlib/msilib/sequence.pyi index 9cc1e0eaec01..b8af09f46e65 100644 --- a/mypy/typeshed/stdlib/msilib/sequence.pyi +++ b/mypy/typeshed/stdlib/msilib/sequence.pyi @@ -2,7 +2,6 @@ import sys from typing_extensions import TypeAlias if sys.platform == "win32": - _SequenceType: TypeAlias = list[tuple[str, str | None, int]] AdminExecuteSequence: _SequenceType diff --git a/mypy/typeshed/stdlib/msilib/text.pyi b/mypy/typeshed/stdlib/msilib/text.pyi index 879429ecea85..1353cf8a2392 100644 --- a/mypy/typeshed/stdlib/msilib/text.pyi +++ b/mypy/typeshed/stdlib/msilib/text.pyi @@ -1,7 +1,6 @@ import sys if sys.platform == "win32": - ActionText: list[tuple[str, str, str | None]] UIText: list[tuple[str, str | None]] diff --git a/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/connection.pyi index 5036f0ef222b..392e3168aaaa 100644 --- a/mypy/typeshed/stdlib/multiprocessing/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/connection.pyi @@ -12,7 +12,7 @@ __all__ = ["Client", "Listener", "Pipe", "wait"] _Address: TypeAlias = Union[str, tuple[str, int]] class _ConnectionBase: - def __init__(self, handle: SupportsIndex, readable: bool = ..., writable: bool = ...) -> None: ... + def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @property def closed(self) -> bool: ... # undocumented @property @@ -21,12 +21,12 @@ class _ConnectionBase: def writable(self) -> bool: ... # undocumented def fileno(self) -> int: ... def close(self) -> None: ... - def send_bytes(self, buf: ReadableBuffer, offset: int = ..., size: int | None = ...) -> None: ... + def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ... def send(self, obj: Any) -> None: ... - def recv_bytes(self, maxlength: int | None = ...) -> bytes: ... - def recv_bytes_into(self, buf: Any, offset: int = ...) -> int: ... + def recv_bytes(self, maxlength: int | None = None) -> bytes: ... + def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... def recv(self) -> Any: ... - def poll(self, timeout: float | None = ...) -> bool: ... + def poll(self, timeout: float | None = 0.0) -> bool: ... def __enter__(self: Self) -> Self: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None @@ -39,7 +39,7 @@ if sys.platform == "win32": class Listener: def __init__( - self, address: _Address | None = ..., family: str | None = ..., backlog: int = ..., authkey: bytes | None = ... + self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None ) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... @@ -55,15 +55,15 @@ class Listener: def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... def answer_challenge(connection: Connection, authkey: bytes) -> None: ... def wait( - object_list: Iterable[Connection | socket.socket | int], timeout: float | None = ... + object_list: Iterable[Connection | socket.socket | int], timeout: float | None = None ) -> list[Connection | socket.socket | int]: ... -def Client(address: _Address, family: str | None = ..., authkey: bytes | None = ...) -> Connection: ... +def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection: ... # N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. # _ConnectionBase is the common base class of Connection and PipeConnection # and can be used in cross-platform code. if sys.platform != "win32": - def Pipe(duplex: bool = ...) -> tuple[Connection, Connection]: ... + def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: ... else: - def Pipe(duplex: bool = ...) -> tuple[PipeConnection, PipeConnection]: ... + def Pipe(duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/context.pyi b/mypy/typeshed/stdlib/multiprocessing/context.pyi index 6622dca19ade..c498649a7b61 100644 --- a/mypy/typeshed/stdlib/multiprocessing/context.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/context.pyi @@ -52,28 +52,28 @@ class BaseContext: # _ConnectionBase is the common base class of Connection and PipeConnection # and can be used in cross-platform code. if sys.platform != "win32": - def Pipe(self, duplex: bool = ...) -> tuple[Connection, Connection]: ... + def Pipe(self, duplex: bool = True) -> tuple[Connection, Connection]: ... else: - def Pipe(self, duplex: bool = ...) -> tuple[PipeConnection, PipeConnection]: ... + def Pipe(self, duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... def Barrier( - self, parties: int, action: Callable[..., object] | None = ..., timeout: float | None = ... + self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None ) -> synchronize.Barrier: ... - def BoundedSemaphore(self, value: int = ...) -> synchronize.BoundedSemaphore: ... - def Condition(self, lock: _LockLike | None = ...) -> synchronize.Condition: ... + def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: ... + def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: ... def Event(self) -> synchronize.Event: ... def Lock(self) -> synchronize.Lock: ... def RLock(self) -> synchronize.RLock: ... - def Semaphore(self, value: int = ...) -> synchronize.Semaphore: ... - def Queue(self, maxsize: int = ...) -> queues.Queue[Any]: ... - def JoinableQueue(self, maxsize: int = ...) -> queues.JoinableQueue[Any]: ... + def Semaphore(self, value: int = 1) -> synchronize.Semaphore: ... + def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: ... + def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: ... 
def SimpleQueue(self) -> queues.SimpleQueue[Any]: ... def Pool( self, - processes: int | None = ..., - initializer: Callable[..., object] | None = ..., + processes: int | None = None, + initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ..., - maxtasksperchild: int | None = ..., + maxtasksperchild: int | None = None, ) -> _Pool: ... @overload def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: ... @@ -86,34 +86,34 @@ class BaseContext: @overload def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> _CT: ... @overload - def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = ...) -> SynchronizedBase[_CT]: ... + def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True) -> SynchronizedBase[_CT]: ... @overload - def Value(self, typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = ...) -> SynchronizedBase[Any]: ... + def Value(self, typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True) -> SynchronizedBase[Any]: ... @overload - def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = ...) -> Any: ... + def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True) -> Any: ... @overload def Array(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False]) -> _CT: ... @overload def Array( - self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = ... + self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True ) -> SynchronizedArray[_CT]: ... @overload def Array( - self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = ... + self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True ) -> SynchronizedArray[Any]: ... @overload def Array( - self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = ... + self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = True ) -> Any: ... def freeze_support(self) -> None: ... def get_logger(self) -> Logger: ... - def log_to_stderr(self, level: _LoggingLevel | None = ...) -> Logger: ... + def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: ... def allow_connection_pickling(self) -> None: ... def set_executable(self, executable: str) -> None: ... def set_forkserver_preload(self, module_names: list[str]) -> None: ... if sys.platform != "win32": @overload - def get_context(self, method: None = ...) -> DefaultContext: ... + def get_context(self, method: None = None) -> DefaultContext: ... @overload def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... @overload @@ -124,17 +124,17 @@ class BaseContext: def get_context(self, method: str) -> BaseContext: ... else: @overload - def get_context(self, method: None = ...) -> DefaultContext: ... + def get_context(self, method: None = None) -> DefaultContext: ... @overload def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... @overload def get_context(self, method: str) -> BaseContext: ... @overload - def get_start_method(self, allow_none: Literal[False] = ...) -> str: ... + def get_start_method(self, allow_none: Literal[False] = False) -> str: ... 
@overload def get_start_method(self, allow_none: bool) -> str | None: ... - def set_start_method(self, method: str | None, force: bool = ...) -> None: ... + def set_start_method(self, method: str | None, force: bool = False) -> None: ... @property def reducer(self) -> str: ... @reducer.setter @@ -149,7 +149,7 @@ class Process(BaseProcess): class DefaultContext(BaseContext): Process: ClassVar[type[Process]] def __init__(self, context: BaseContext) -> None: ... - def get_start_method(self, allow_none: bool = ...) -> str: ... + def get_start_method(self, allow_none: bool = False) -> str: ... def get_all_start_methods(self) -> list[str]: ... if sys.version_info < (3, 8): __all__: ClassVar[list[str]] diff --git a/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi b/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi index 5d289c058e03..5b2a33772de6 100644 --- a/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi @@ -47,9 +47,9 @@ class DummyProcess(threading.Thread): def exitcode(self) -> Literal[0] | None: ... def __init__( self, - group: Any = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: Any = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., kwargs: Mapping[str, Any] = ..., ) -> None: ... @@ -65,11 +65,13 @@ class Value: _typecode: Any _value: Any value: Any - def __init__(self, typecode: Any, value: Any, lock: Any = ...) -> None: ... + def __init__(self, typecode: Any, value: Any, lock: Any = True) -> None: ... -def Array(typecode: Any, sequence: Sequence[Any], lock: Any = ...) -> array.array[Any]: ... +def Array(typecode: Any, sequence: Sequence[Any], lock: Any = True) -> array.array[Any]: ... def Manager() -> Any: ... -def Pool(processes: int | None = ..., initializer: Callable[..., object] | None = ..., initargs: Iterable[Any] = ...) -> Any: ... +def Pool( + processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... +) -> Any: ... def active_children() -> list[Any]: ... current_process = threading.current_thread diff --git a/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi index fd909d0d32e1..1630472b3b06 100644 --- a/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi @@ -23,7 +23,7 @@ class Connection: ) -> None: ... def __init__(self, _in: Any, _out: Any) -> None: ... def close(self) -> None: ... - def poll(self, timeout: float = ...) -> bool: ... + def poll(self, timeout: float = 0.0) -> bool: ... class Listener: _backlog_queue: Queue[Any] | None @@ -33,9 +33,9 @@ class Listener: def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - def __init__(self, address: _Address | None = ..., family: int | None = ..., backlog: int = ...) -> None: ... + def __init__(self, address: _Address | None = None, family: int | None = None, backlog: int = 1) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... def Client(address: _Address) -> Connection: ... -def Pipe(duplex: bool = ...) -> tuple[Connection, Connection]: ... +def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: ... 
diff --git a/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi b/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi index 10269dfbba29..df435f00ebe7 100644 --- a/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -1,4 +1,4 @@ -from _typeshed import FileDescriptorLike +from _typeshed import FileDescriptorLike, Unused from collections.abc import Sequence from struct import Struct from typing import Any @@ -18,8 +18,8 @@ def main( listener_fd: int | None, alive_r: FileDescriptorLike, preload: Sequence[str], - main_path: str | None = ..., - sys_path: object | None = ..., + main_path: str | None = None, + sys_path: Unused = None, ) -> None: ... def read_signed(fd: int) -> Any: ... def write_signed(fd: int, n: int) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/heap.pyi b/mypy/typeshed/stdlib/multiprocessing/heap.pyi index 9c8f55604a64..b5e2ced5e8ee 100644 --- a/mypy/typeshed/stdlib/multiprocessing/heap.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/heap.pyi @@ -15,7 +15,7 @@ class Arena: def __init__(self, size: int) -> None: ... else: fd: int - def __init__(self, size: int, fd: int = ...) -> None: ... + def __init__(self, size: int, fd: int = -1) -> None: ... _Block: TypeAlias = tuple[Arena, int, int] diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi index 2630e5864520..1696714d187b 100644 --- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -47,11 +47,11 @@ class BaseProxy: self, token: Any, serializer: str, - manager: Any = ..., - authkey: AnyStr | None = ..., - exposed: Any = ..., - incref: bool = ..., - manager_owned: bool = ..., + manager: Any = None, + authkey: AnyStr | None = None, + exposed: Any = None, + incref: bool = True, + manager_owned: bool = False, ) -> None: ... def __deepcopy__(self, memo: Any | None) -> Any: ... def _callmethod(self, methodname: str, args: tuple[Any, ...] = ..., kwds: dict[Any, Any] = ...) -> None: ... @@ -132,34 +132,38 @@ class BaseManager: if sys.version_info >= (3, 11): def __init__( self, - address: Any | None = ..., - authkey: bytes | None = ..., - serializer: str = ..., - ctx: BaseContext | None = ..., + address: Any | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, *, - shutdown_timeout: float = ..., + shutdown_timeout: float = 1.0, ) -> None: ... else: def __init__( - self, address: Any | None = ..., authkey: bytes | None = ..., serializer: str = ..., ctx: BaseContext | None = ... + self, + address: Any | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, ) -> None: ... def get_server(self) -> Server: ... def connect(self) -> None: ... - def start(self, initializer: Callable[..., object] | None = ..., initargs: Iterable[Any] = ...) -> None: ... + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ...) -> None: ... def shutdown(self) -> None: ... # only available after start() was called - def join(self, timeout: float | None = ...) -> None: ... # undocumented + def join(self, timeout: float | None = None) -> None: ... # undocumented @property def address(self) -> Any: ... 
@classmethod def register( cls, typeid: str, - callable: Callable[..., object] | None = ..., - proxytype: Any = ..., - exposed: Sequence[str] | None = ..., - method_to_typeid: Mapping[str, str] | None = ..., - create_method: bool = ..., + callable: Callable[..., object] | None = None, + proxytype: Any = None, + exposed: Sequence[str] | None = None, + method_to_typeid: Mapping[str, str] | None = None, + create_method: bool = True, ) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( diff --git a/mypy/typeshed/stdlib/multiprocessing/pool.pyi b/mypy/typeshed/stdlib/multiprocessing/pool.pyi index 2b97e16f0525..3e2d0c3cd51e 100644 --- a/mypy/typeshed/stdlib/multiprocessing/pool.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/pool.pyi @@ -26,8 +26,8 @@ class ApplyResult(Generic[_T]): error_callback: Callable[[BaseException], object] | None, ) -> None: ... - def get(self, timeout: float | None = ...) -> _T: ... - def wait(self, timeout: float | None = ...) -> None: ... + def get(self, timeout: float | None = None) -> _T: ... + def wait(self, timeout: float | None = None) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... if sys.version_info >= (3, 9): @@ -63,19 +63,19 @@ class IMapIterator(Iterator[_T]): def __init__(self, cache: dict[int, IMapIterator[Any]]) -> None: ... def __iter__(self: Self) -> Self: ... - def next(self, timeout: float | None = ...) -> _T: ... - def __next__(self, timeout: float | None = ...) -> _T: ... + def next(self, timeout: float | None = None) -> _T: ... + def __next__(self, timeout: float | None = None) -> _T: ... class IMapUnorderedIterator(IMapIterator[_T]): ... class Pool: def __init__( self, - processes: int | None = ..., - initializer: Callable[..., object] | None = ..., + processes: int | None = None, + initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ..., - maxtasksperchild: int | None = ..., - context: Any | None = ..., + maxtasksperchild: int | None = None, + context: Any | None = None, ) -> None: ... def apply(self, func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ...) -> _T: ... def apply_async( @@ -83,30 +83,28 @@ class Pool: func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ..., - callback: Callable[[_T], object] | None = ..., - error_callback: Callable[[BaseException], object] | None = ..., + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, ) -> AsyncResult[_T]: ... - def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = ...) -> list[_T]: ... + def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: ... def map_async( self, func: Callable[[_S], _T], iterable: Iterable[_S], - chunksize: int | None = ..., - callback: Callable[[_T], object] | None = ..., - error_callback: Callable[[BaseException], object] | None = ..., + chunksize: int | None = None, + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, ) -> MapResult[_T]: ... - def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = ...) -> IMapIterator[_T]: ... - def imap_unordered( - self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = ... - ) -> IMapIterator[_T]: ... - def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = ...) -> list[_T]: ... 
+ def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: ... def starmap_async( self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], - chunksize: int | None = ..., - callback: Callable[[_T], object] | None = ..., - error_callback: Callable[[BaseException], object] | None = ..., + chunksize: int | None = None, + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, ) -> AsyncResult[list[_T]]: ... def close(self) -> None: ... def terminate(self) -> None: ... @@ -118,7 +116,7 @@ class Pool: class ThreadPool(Pool): def __init__( - self, processes: int | None = ..., initializer: Callable[..., object] | None = ..., initargs: Iterable[Any] = ... + self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... ) -> None: ... # undocumented diff --git a/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi b/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi index 3db6a84394b9..4fcbfd99a8d0 100644 --- a/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi @@ -16,8 +16,8 @@ if sys.platform != "win32": def __init__(self, process_obj: BaseProcess) -> None: ... def duplicate_for_child(self, fd: int) -> int: ... - def poll(self, flag: int = ...) -> int | None: ... - def wait(self, timeout: float | None = ...) -> int | None: ... + def poll(self, flag: int = 1) -> int | None: ... + def wait(self, timeout: float | None = None) -> int | None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def close(self) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi b/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi index f5cb0a6c4844..3dc9d5bd7332 100644 --- a/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -21,7 +21,7 @@ if sys.platform == "win32": def __init__(self, process_obj: BaseProcess) -> None: ... def duplicate_for_child(self, handle: int) -> int: ... - def wait(self, timeout: float | None = ...) -> int | None: ... + def wait(self, timeout: float | None = None) -> int | None: ... def poll(self) -> int | None: ... def terminate(self) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/process.pyi b/mypy/typeshed/stdlib/multiprocessing/process.pyi index 7c8422e391c2..ef1b4b596d33 100644 --- a/mypy/typeshed/stdlib/multiprocessing/process.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/process.pyi @@ -14,20 +14,20 @@ class BaseProcess: _identity: tuple[int, ...] # undocumented def __init__( self, - group: None = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., kwargs: Mapping[str, Any] = ..., *, - daemon: bool | None = ..., + daemon: bool | None = None, ) -> None: ... def run(self) -> None: ... def start(self) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def close(self) -> None: ... - def join(self, timeout: float | None = ...) -> None: ... + def join(self, timeout: float | None = None) -> None: ... 
def is_alive(self) -> bool: ... @property def exitcode(self) -> int | None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/queues.pyi b/mypy/typeshed/stdlib/multiprocessing/queues.pyi index 02a67216c72b..7ba17dcfbe05 100644 --- a/mypy/typeshed/stdlib/multiprocessing/queues.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/queues.pyi @@ -12,9 +12,9 @@ _T = TypeVar("_T") class Queue(queue.Queue[_T]): # FIXME: `ctx` is a circular dependency and it's not actually optional. # It's marked as such to be able to use the generic Queue in __init__.pyi. - def __init__(self, maxsize: int = ..., *, ctx: Any = ...) -> None: ... - def get(self, block: bool = ..., timeout: float | None = ...) -> _T: ... - def put(self, obj: _T, block: bool = ..., timeout: float | None = ...) -> None: ... + def __init__(self, maxsize: int = 0, *, ctx: Any = ...) -> None: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def put(self, obj: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def get_nowait(self) -> _T: ... def close(self) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi index d6b70aefa48d..e5a8cde8f849 100644 --- a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi @@ -1,6 +1,6 @@ import pickle import sys -from _typeshed import HasFileno, SupportsWrite +from _typeshed import HasFileno, SupportsWrite, Unused from abc import ABCMeta from builtins import type as Type # alias to avoid name clash from collections.abc import Callable @@ -24,27 +24,27 @@ class ForkingPickler(pickle.Pickler): @classmethod def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... @classmethod - def dumps(cls, obj: Any, protocol: int | None = ...) -> memoryview: ... + def dumps(cls, obj: Any, protocol: int | None = None) -> memoryview: ... loads = pickle.loads register = ForkingPickler.register -def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: ... if sys.platform == "win32": if sys.version_info >= (3, 8): def duplicate( - handle: int, target_process: int | None = ..., inheritable: bool = ..., *, source_process: int | None = ... + handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None ) -> int: ... else: - def duplicate(handle: int, target_process: int | None = ..., inheritable: bool = ...) -> int: ... + def duplicate(handle: int, target_process: int | None = None, inheritable: bool = False) -> int: ... def steal_handle(source_pid: int, handle: int) -> int: ... def send_handle(conn: connection.PipeConnection, handle: int, destination_pid: int) -> None: ... def recv_handle(conn: connection.PipeConnection) -> int: ... class DupHandle: - def __init__(self, handle: int, access: int, pid: int | None = ...) -> None: ... + def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... def detach(self) -> int: ... else: @@ -54,8 +54,7 @@ else: ACKNOWLEDGE: Literal[False] def recvfds(sock: socket, size: int) -> list[int]: ... - # destination_pid is unused - def send_handle(conn: HasFileno, handle: int, destination_pid: object) -> None: ... + def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: ... def recv_handle(conn: HasFileno) -> int: ... 
def sendfds(sock: socket, fds: list[int]) -> None: ... def DupFd(fd: int) -> Any: ... # Return type is really hard to get right @@ -92,5 +91,4 @@ class AbstractReducer(metaclass=ABCMeta): sendfds = _sendfds recvfds = _recvfds DupFd = _DupFd - # *args are unused - def __init__(self, *args: object) -> None: ... + def __init__(self, *args: Unused) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi index 7708df9b6f3c..5fee7cf31e17 100644 --- a/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -17,4 +17,4 @@ else: def __init__(self, fd: int) -> None: ... def detach(self) -> int: ... -def stop(timeout: float | None = ...) -> None: ... +def stop(timeout: float | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi index 3ce0ca3863cc..841c947360e8 100644 --- a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -11,7 +11,7 @@ __all__ = ["SharedMemory", "ShareableList"] _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) class SharedMemory: - def __init__(self, name: str | None = ..., create: bool = ..., size: int = ...) -> None: ... + def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... @property def buf(self) -> memoryview: ... @property @@ -24,9 +24,9 @@ class SharedMemory: class ShareableList(Generic[_SLT]): shm: SharedMemory @overload - def __init__(self, sequence: None = ..., *, name: str | None = ...) -> None: ... + def __init__(self, sequence: None = None, *, name: str | None = None) -> None: ... @overload - def __init__(self, sequence: Iterable[_SLT], *, name: str | None = ...) -> None: ... + def __init__(self, sequence: Iterable[_SLT], *, name: str | None = None) -> None: ... def __getitem__(self, position: int) -> _SLT: ... def __setitem__(self, position: int, value: _SLT) -> None: ... def __reduce__(self: Self) -> tuple[Self, tuple[_SLT, ...]]: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi index e988cda322f4..686a45d9ae41 100644 --- a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -21,56 +21,56 @@ def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[An @overload def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload -def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = ...) -> _CT: ... +def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: ... @overload def Value( - typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = ..., ctx: BaseContext | None = ... + typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None ) -> SynchronizedBase[_CT]: ... @overload def Value( - typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = ..., ctx: BaseContext | None = ... + typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None ) -> SynchronizedBase[Any]: ... 
@overload def Value( - typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = ..., ctx: BaseContext | None = ... + typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True, ctx: BaseContext | None = None ) -> Any: ... @overload def Array( - typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = ... + typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = None ) -> _CT: ... @overload def Array( typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, - lock: Literal[True] | _LockLike = ..., - ctx: BaseContext | None = ..., + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, ) -> SynchronizedArray[_CT]: ... @overload def Array( typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, - lock: Literal[True] | _LockLike = ..., - ctx: BaseContext | None = ..., + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, ) -> SynchronizedArray[Any]: ... @overload def Array( typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, - lock: bool | _LockLike = ..., - ctx: BaseContext | None = ..., + lock: bool | _LockLike = True, + ctx: BaseContext | None = None, ) -> Any: ... def copy(obj: _CT) -> _CT: ... @overload -def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = ..., ctx: Any | None = ...) -> Synchronized[_T]: ... +def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ... @overload -def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = ..., ctx: Any | None = ...) -> SynchronizedString: ... +def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... @overload -def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = ..., ctx: Any | None = ...) -> SynchronizedArray[_CT]: ... +def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedArray[_CT]: ... @overload -def synchronized(obj: _CT, lock: _LockLike | None = ..., ctx: Any | None = ...) -> SynchronizedBase[_CT]: ... +def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ... class _AcquireFunc(Protocol): def __call__(self, block: bool = ..., timeout: float | None = ...) -> bool: ... @@ -78,7 +78,7 @@ class _AcquireFunc(Protocol): class SynchronizedBase(Generic[_CT]): acquire: _AcquireFunc release: Callable[[], None] - def __init__(self, obj: Any, lock: _LockLike | None = ..., ctx: Any | None = ...) -> None: ... + def __init__(self, obj: Any, lock: _LockLike | None = None, ctx: Any | None = None) -> None: ... def __reduce__(self) -> tuple[Callable[[Any, _LockLike], SynchronizedBase[Any]], tuple[Any, _LockLike]]: ... def get_obj(self) -> _CT: ... def get_lock(self) -> _LockLike: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/spawn.pyi b/mypy/typeshed/stdlib/multiprocessing/spawn.pyi index 50570ff3717b..26ff165756bf 100644 --- a/mypy/typeshed/stdlib/multiprocessing/spawn.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/spawn.pyi @@ -20,7 +20,7 @@ def get_executable() -> str: ... def is_forking(argv: Sequence[str]) -> bool: ... def freeze_support() -> None: ... def get_command_line(**kwds: Any) -> list[str]: ... -def spawn_main(pipe_handle: int, parent_pid: int | None = ..., tracker_fd: int | None = ...) 
-> None: ... +def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: ... # undocumented def _main(fd: int) -> Any: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi b/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi index c89142f2cd3b..7043759078a2 100644 --- a/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi @@ -11,18 +11,18 @@ _LockLike: TypeAlias = Lock | RLock class Barrier(threading.Barrier): def __init__( - self, parties: int, action: Callable[[], object] | None = ..., timeout: float | None = ..., *ctx: BaseContext + self, parties: int, action: Callable[[], object] | None = None, timeout: float | None = None, *ctx: BaseContext ) -> None: ... class BoundedSemaphore(Semaphore): - def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... class Condition(AbstractContextManager[bool]): - def __init__(self, lock: _LockLike | None = ..., *, ctx: BaseContext) -> None: ... - def notify(self, n: int = ...) -> None: ... + def __init__(self, lock: _LockLike | None = None, *, ctx: BaseContext) -> None: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... - def wait_for(self, predicate: Callable[[], bool], timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], bool], timeout: float | None = None) -> bool: ... def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... def release(self) -> None: ... def __exit__( @@ -34,7 +34,7 @@ class Event: def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... class Lock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... @@ -43,7 +43,7 @@ class RLock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... class Semaphore(SemLock): - def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... # Not part of public API class SemLock(AbstractContextManager[bool]): diff --git a/mypy/typeshed/stdlib/multiprocessing/util.pyi b/mypy/typeshed/stdlib/multiprocessing/util.pyi index 263781da9432..006ec3a9f6ce 100644 --- a/mypy/typeshed/stdlib/multiprocessing/util.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/util.pyi @@ -1,5 +1,5 @@ import threading -from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc +from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc, Unused from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from logging import Logger, _Level as _LoggingLevel from typing import Any, SupportsInt @@ -37,7 +37,7 @@ def debug(msg: object, *args: object) -> None: ... def info(msg: object, *args: object) -> None: ... def sub_warning(msg: object, *args: object) -> None: ... def get_logger() -> Logger: ... -def log_to_stderr(level: _LoggingLevel | None = ...) -> Logger: ... +def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: ... def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... 
abstract_sockets_supported: bool @@ -51,12 +51,12 @@ class Finalize: obj: Incomplete | None, callback: Callable[..., Incomplete], args: Sequence[Any] = ..., - kwargs: Mapping[str, Any] | None = ..., - exitpriority: int | None = ..., + kwargs: Mapping[str, Any] | None = None, + exitpriority: int | None = None, ) -> None: ... def __call__( self, - wr: object = ..., + wr: Unused = None, _finalizer_registry: MutableMapping[Incomplete, Incomplete] = ..., sub_debug: Callable[..., object] = ..., getpid: Callable[[], int] = ..., @@ -70,7 +70,7 @@ class ForkAwareThreadLock: acquire: Callable[[bool, float], bool] release: Callable[[], None] def __enter__(self) -> bool: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... class ForkAwareLocal(threading.local): ... diff --git a/mypy/typeshed/stdlib/netrc.pyi b/mypy/typeshed/stdlib/netrc.pyi index 217c0eb542d0..480f55a46d64 100644 --- a/mypy/typeshed/stdlib/netrc.pyi +++ b/mypy/typeshed/stdlib/netrc.pyi @@ -8,7 +8,7 @@ class NetrcParseError(Exception): filename: str | None lineno: int | None msg: str - def __init__(self, msg: str, filename: StrOrBytesPath | None = ..., lineno: int | None = ...) -> None: ... + def __init__(self, msg: str, filename: StrOrBytesPath | None = None, lineno: int | None = None) -> None: ... # (login, account, password) tuple if sys.version_info >= (3, 11): @@ -19,5 +19,5 @@ else: class netrc: hosts: dict[str, _NetrcTuple] macros: dict[str, list[str]] - def __init__(self, file: StrOrBytesPath | None = ...) -> None: ... + def __init__(self, file: StrOrBytesPath | None = None) -> None: ... def authenticators(self, host: str) -> _NetrcTuple | None: ... diff --git a/mypy/typeshed/stdlib/nntplib.pyi b/mypy/typeshed/stdlib/nntplib.pyi index aa5bcba5726c..02e743ea9d1e 100644 --- a/mypy/typeshed/stdlib/nntplib.pyi +++ b/mypy/typeshed/stdlib/nntplib.pyi @@ -2,7 +2,7 @@ import datetime import socket import ssl import sys -from _typeshed import Self +from _typeshed import Self, Unused from builtins import list as _list # conflicts with a method named "list" from collections.abc import Iterable from typing import IO, Any, NamedTuple @@ -65,49 +65,49 @@ class NNTP: def __init__( self, host: str, - port: int = ..., - user: str | None = ..., - password: str | None = ..., - readermode: bool | None = ..., - usenetrc: bool = ..., + port: int = 119, + user: str | None = None, + password: str | None = None, + readermode: bool | None = None, + usenetrc: bool = False, timeout: float = ..., ) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def getwelcome(self) -> str: ... def getcapabilities(self) -> dict[str, _list[str]]: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... - def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def list(self, group_pattern: str | None = ..., *, file: _File = ...) -> tuple[str, _list[str]]: ... + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... 
+ def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: ... def description(self, group: str) -> str: ... def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... def group(self, name: str) -> tuple[str, int, int, int, str]: ... - def help(self, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def stat(self, message_spec: Any = ...) -> tuple[str, int, str]: ... + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: ... + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: ... def next(self) -> tuple[str, int, str]: ... def last(self) -> tuple[str, int, str]: ... - def head(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... - def body(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... - def article(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... def slave(self) -> str: ... - def xhdr(self, hdr: str, str: Any, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def xover(self, start: int, end: int, *, file: _File = ...) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... + def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: ... + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... def over( - self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = ... + self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... if sys.version_info < (3, 9): - def xgtitle(self, group: str, *, file: _File = ...) -> tuple[str, _list[tuple[str, str]]]: ... + def xgtitle(self, group: str, *, file: _File = None) -> tuple[str, _list[tuple[str, str]]]: ... def xpath(self, id: Any) -> tuple[str, str]: ... def date(self) -> tuple[str, datetime.datetime]: ... def post(self, data: bytes | Iterable[bytes]) -> str: ... def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... def quit(self) -> str: ... - def login(self, user: str | None = ..., password: str | None = ..., usenetrc: bool = ...) -> None: ... - def starttls(self, context: ssl.SSLContext | None = ...) -> None: ... + def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... + def starttls(self, context: ssl.SSLContext | None = None) -> None: ... class NNTP_SSL(NNTP): ssl_context: ssl.SSLContext | None @@ -115,11 +115,11 @@ class NNTP_SSL(NNTP): def __init__( self, host: str, - port: int = ..., - user: str | None = ..., - password: str | None = ..., - ssl_context: ssl.SSLContext | None = ..., - readermode: bool | None = ..., - usenetrc: bool = ..., + port: int = 563, + user: str | None = None, + password: str | None = None, + ssl_context: ssl.SSLContext | None = None, + readermode: bool | None = None, + usenetrc: bool = False, timeout: float = ..., ) -> None: ... 
diff --git a/mypy/typeshed/stdlib/ntpath.pyi b/mypy/typeshed/stdlib/ntpath.pyi index 0cd3e446475b..f1fa137c6d88 100644 --- a/mypy/typeshed/stdlib/ntpath.pyi +++ b/mypy/typeshed/stdlib/ntpath.pyi @@ -101,9 +101,9 @@ def join(__path: BytesPath, *paths: BytesPath) -> bytes: ... if sys.platform == "win32": if sys.version_info >= (3, 10): @overload - def realpath(path: PathLike[AnyStr], *, strict: bool = ...) -> AnyStr: ... + def realpath(path: PathLike[AnyStr], *, strict: bool = False) -> AnyStr: ... @overload - def realpath(path: AnyStr, *, strict: bool = ...) -> AnyStr: ... + def realpath(path: AnyStr, *, strict: bool = False) -> AnyStr: ... else: @overload def realpath(path: PathLike[AnyStr]) -> AnyStr: ... diff --git a/mypy/typeshed/stdlib/numbers.pyi b/mypy/typeshed/stdlib/numbers.pyi index d94ae7faf890..55f21041ae44 100644 --- a/mypy/typeshed/stdlib/numbers.pyi +++ b/mypy/typeshed/stdlib/numbers.pyi @@ -60,7 +60,7 @@ class Real(Complex, SupportsFloat): def __ceil__(self) -> int: ... @abstractmethod @overload - def __round__(self, ndigits: None = ...) -> int: ... + def __round__(self, ndigits: None = None) -> int: ... @abstractmethod @overload def __round__(self, ndigits: int) -> Any: ... @@ -99,7 +99,7 @@ class Integral(Rational): def __int__(self) -> int: ... def __index__(self) -> int: ... @abstractmethod - def __pow__(self, exponent: Any, modulus: Any | None = ...) -> Any: ... + def __pow__(self, exponent: Any, modulus: Any | None = None) -> Any: ... @abstractmethod def __lshift__(self, other: Any) -> Any: ... @abstractmethod diff --git a/mypy/typeshed/stdlib/opcode.pyi b/mypy/typeshed/stdlib/opcode.pyi index 402dbb74cf58..1232454e71ea 100644 --- a/mypy/typeshed/stdlib/opcode.pyi +++ b/mypy/typeshed/stdlib/opcode.pyi @@ -49,9 +49,9 @@ HAVE_ARGUMENT: Literal[90] EXTENDED_ARG: Literal[144] if sys.version_info >= (3, 8): - def stack_effect(__opcode: int, __oparg: int | None = ..., *, jump: bool | None = ...) -> int: ... + def stack_effect(__opcode: int, __oparg: int | None = None, *, jump: bool | None = None) -> int: ... else: - def stack_effect(__opcode: int, __oparg: int | None = ...) -> int: ... + def stack_effect(__opcode: int, __oparg: int | None = None) -> int: ... hasnargs: list[int] diff --git a/mypy/typeshed/stdlib/optparse.pyi b/mypy/typeshed/stdlib/optparse.pyi index 5cff39717a7b..a8c1c4cfb93e 100644 --- a/mypy/typeshed/stdlib/optparse.pyi +++ b/mypy/typeshed/stdlib/optparse.pyi @@ -82,14 +82,14 @@ class HelpFormatter: class IndentedHelpFormatter(HelpFormatter): def __init__( - self, indent_increment: int = ..., max_help_position: int = ..., width: int | None = ..., short_first: int = ... + self, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None, short_first: int = 1 ) -> None: ... def format_heading(self, heading: str) -> str: ... def format_usage(self, usage: str) -> str: ... class TitledHelpFormatter(HelpFormatter): def __init__( - self, indent_increment: int = ..., max_help_position: int = ..., width: int | None = ..., short_first: int = ... + self, indent_increment: int = 0, max_help_position: int = 24, width: int | None = None, short_first: int = 0 ) -> None: ... def format_heading(self, heading: str) -> str: ... def format_usage(self, usage: str) -> str: ... @@ -167,18 +167,18 @@ class OptionGroup(OptionContainer): option_list: list[Option] parser: OptionParser title: str - def __init__(self, parser: OptionParser, title: str, description: str | None = ...) -> None: ... 
+ def __init__(self, parser: OptionParser, title: str, description: str | None = None) -> None: ... def _create_option_list(self) -> None: ... def set_title(self, title: str) -> None: ... class Values: - def __init__(self, defaults: Mapping[str, Any] | None = ...) -> None: ... + def __init__(self, defaults: Mapping[str, Any] | None = None) -> None: ... def _update(self, dict: Mapping[str, Any], mode: Any) -> None: ... def _update_careful(self, dict: Mapping[str, Any]) -> None: ... def _update_loose(self, dict: Mapping[str, Any]) -> None: ... def ensure_value(self, attr: str, value: Any) -> Any: ... - def read_file(self, filename: str, mode: str = ...) -> None: ... - def read_module(self, modname: str, mode: str = ...) -> None: ... + def read_file(self, filename: str, mode: str = "careful") -> None: ... + def read_module(self, modname: str, mode: str = "careful") -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, __name: str, __value: Any) -> None: ... def __eq__(self, other: object) -> bool: ... @@ -199,16 +199,16 @@ class OptionParser(OptionContainer): version: str def __init__( self, - usage: str | None = ..., - option_list: Iterable[Option] | None = ..., + usage: str | None = None, + option_list: Iterable[Option] | None = None, option_class: type[Option] = ..., - version: str | None = ..., - conflict_handler: str = ..., - description: str | None = ..., - formatter: HelpFormatter | None = ..., - add_help_option: bool = ..., - prog: str | None = ..., - epilog: str | None = ..., + version: str | None = None, + conflict_handler: str = "error", + description: str | None = None, + formatter: HelpFormatter | None = None, + add_help_option: bool = True, + prog: str | None = None, + epilog: str | None = None, ) -> None: ... def _add_help_option(self) -> None: ... def _add_version_option(self) -> None: ... @@ -217,7 +217,7 @@ class OptionParser(OptionContainer): def _get_args(self, args: Iterable[Any]) -> list[Any]: ... def _init_parsing_state(self) -> None: ... def _match_long_opt(self, opt: str) -> str: ... - def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = ...) -> None: ... + def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = True) -> None: ... def _process_args(self, largs: list[Any], rargs: list[Any], values: Values) -> None: ... def _process_long_opt(self, rargs: list[Any], values: Any) -> None: ... def _process_short_opts(self, rargs: list[Any], values: Any) -> None: ... @@ -229,23 +229,23 @@ class OptionParser(OptionContainer): def disable_interspersed_args(self) -> None: ... def enable_interspersed_args(self) -> None: ... def error(self, msg: str) -> None: ... - def exit(self, status: int = ..., msg: str | None = ...) -> None: ... + def exit(self, status: int = 0, msg: str | None = None) -> None: ... def expand_prog_name(self, s: str | None) -> Any: ... def format_epilog(self, formatter: HelpFormatter) -> Any: ... - def format_help(self, formatter: HelpFormatter | None = ...) -> str: ... - def format_option_help(self, formatter: HelpFormatter | None = ...) -> str: ... + def format_help(self, formatter: HelpFormatter | None = None) -> str: ... + def format_option_help(self, formatter: HelpFormatter | None = None) -> str: ... def get_default_values(self) -> Values: ... def get_option_group(self, opt_str: str) -> Any: ... def get_prog_name(self) -> str: ... def get_usage(self) -> str: ... def get_version(self) -> str: ... @overload - def parse_args(self, args: None = ..., values: Values | None = ...) 
-> tuple[Values, list[str]]: ... + def parse_args(self, args: None = None, values: Values | None = None) -> tuple[Values, list[str]]: ... @overload - def parse_args(self, args: Sequence[AnyStr], values: Values | None = ...) -> tuple[Values, list[AnyStr]]: ... - def print_usage(self, file: IO[str] | None = ...) -> None: ... - def print_help(self, file: IO[str] | None = ...) -> None: ... - def print_version(self, file: IO[str] | None = ...) -> None: ... + def parse_args(self, args: Sequence[AnyStr], values: Values | None = None) -> tuple[Values, list[AnyStr]]: ... + def print_usage(self, file: IO[str] | None = None) -> None: ... + def print_help(self, file: IO[str] | None = None) -> None: ... + def print_version(self, file: IO[str] | None = None) -> None: ... def set_default(self, dest: Any, value: Any) -> None: ... def set_defaults(self, **kwargs: Any) -> None: ... def set_process_default_values(self, process: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index ec31cc5e2a76..b1b9db9ae2a7 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -15,6 +15,7 @@ from _typeshed import ( StrOrBytesPath, StrPath, SupportsLenAndGetItem, + Unused, WriteableBuffer, structseq, ) @@ -366,7 +367,7 @@ class PathLike(Protocol[AnyStr_co]): def __fspath__(self) -> AnyStr_co: ... @overload -def listdir(path: StrPath | None = ...) -> list[str]: ... +def listdir(path: StrPath | None = None) -> list[str]: ... @overload def listdir(path: BytesPath) -> list[bytes]: ... @overload @@ -381,10 +382,10 @@ class DirEntry(Generic[AnyStr]): @property def path(self) -> AnyStr: ... def inode(self) -> int: ... - def is_dir(self, *, follow_symlinks: bool = ...) -> bool: ... - def is_file(self, *, follow_symlinks: bool = ...) -> bool: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... def is_symlink(self) -> bool: ... - def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... def __fspath__(self) -> AnyStr: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -436,7 +437,7 @@ def fspath(path: str) -> str: ... def fspath(path: bytes) -> bytes: ... @overload def fspath(path: PathLike[AnyStr]) -> AnyStr: ... -def get_exec_path(env: Mapping[str, str] | None = ...) -> list[str]: ... +def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: ... def getlogin() -> str: ... def getpid() -> int: ... def getppid() -> int: ... 
@@ -515,9 +516,9 @@ _Opener: TypeAlias = Callable[[str, int], int] @overload def fdopen( fd: int, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, errors: str | None = ..., newline: str | None = ..., closefd: bool = ..., @@ -528,7 +529,7 @@ def fdopen( fd: int, mode: OpenBinaryMode, buffering: Literal[0], - encoding: None = ..., + encoding: None = None, errors: None = ..., newline: None = ..., closefd: bool = ..., @@ -538,8 +539,8 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryModeUpdating, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, errors: None = ..., newline: None = ..., closefd: bool = ..., @@ -549,8 +550,8 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryModeWriting, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, errors: None = ..., newline: None = ..., closefd: bool = ..., @@ -560,8 +561,8 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryModeReading, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, errors: None = ..., newline: None = ..., closefd: bool = ..., @@ -571,8 +572,8 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryMode, - buffering: int = ..., - encoding: None = ..., + buffering: int = -1, + encoding: None = None, errors: None = ..., newline: None = ..., closefd: bool = ..., @@ -582,8 +583,8 @@ def fdopen( def fdopen( fd: int, mode: str, - buffering: int = ..., - encoding: str | None = ..., + buffering: int = -1, + encoding: str | None = None, errors: str | None = ..., newline: str | None = ..., closefd: bool = ..., @@ -593,7 +594,7 @@ def close(fd: int) -> None: ... def closerange(__fd_low: int, __fd_high: int) -> None: ... def device_encoding(fd: int) -> str | None: ... def dup(__fd: int) -> int: ... -def dup2(fd: int, fd2: int, inheritable: bool = ...) -> int: ... +def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: ... def fstat(fd: int) -> stat_result: ... def ftruncate(__fd: int, __length: int) -> None: ... def fsync(fd: FileDescriptorLike) -> None: ... @@ -603,7 +604,7 @@ if sys.platform != "win32" and sys.version_info >= (3, 11): def login_tty(__fd: int) -> None: ... def lseek(__fd: int, __position: int, __how: int) -> int: ... -def open(path: StrOrBytesPath, flags: int, mode: int = ..., *, dir_fd: int | None = ...) -> int: ... +def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ... def pipe() -> tuple[int, int]: ... def read(__fd: int, __length: int) -> bytes: ... @@ -625,8 +626,8 @@ if sys.platform != "win32": def pread(__fd: int, __length: int, __offset: int) -> bytes: ... def pwrite(__fd: int, __buffer: ReadableBuffer, __offset: int) -> int: ... # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not - def preadv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer], __offset: int, __flags: int = ...) -> int: ... - def pwritev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer], __offset: int, __flags: int = ...) -> int: ... + def preadv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer], __offset: int, __flags: int = 0) -> int: ... + def pwritev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer], __offset: int, __flags: int = 0) -> int: ... 
if sys.platform != "darwin": if sys.version_info >= (3, 10): RWF_APPEND: int # docs say available on 3.7+, stubtest says otherwise @@ -644,7 +645,7 @@ if sys.platform != "win32": count: int, headers: Sequence[ReadableBuffer] = ..., trailers: Sequence[ReadableBuffer] = ..., - flags: int = ..., + flags: int = 0, ) -> int: ... # FreeBSD and Mac OS X only def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ... def writev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer]) -> int: ... @@ -674,7 +675,7 @@ if sys.platform != "win32": def write(__fd: int, __data: ReadableBuffer) -> int: ... def access( - path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = ..., effective_ids: bool = ..., follow_symlinks: bool = ... + path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True ) -> bool: ... def chdir(path: FileDescriptorOrPath) -> None: ... @@ -683,17 +684,17 @@ if sys.platform != "win32": def getcwd() -> str: ... def getcwdb() -> bytes: ... -def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... +def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: ... if sys.platform != "win32" and sys.platform != "linux": - def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix + def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix def lchflags(path: StrOrBytesPath, flags: int) -> None: ... def lchmod(path: StrOrBytesPath, mode: int) -> None: ... if sys.platform != "win32": def chroot(path: StrOrBytesPath) -> None: ... def chown( - path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ... + path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True ) -> None: ... def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... @@ -701,101 +702,105 @@ def link( src: StrOrBytesPath, dst: StrOrBytesPath, *, - src_dir_fd: int | None = ..., - dst_dir_fd: int | None = ..., - follow_symlinks: bool = ..., + src_dir_fd: int | None = None, + dst_dir_fd: int | None = None, + follow_symlinks: bool = True, ) -> None: ... -def lstat(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> stat_result: ... -def mkdir(path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ...) -> None: ... +def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: ... +def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: ... if sys.platform != "win32": - def mkfifo(path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ...) -> None: ... # Unix only + def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: ... # Unix only -def makedirs(name: StrOrBytesPath, mode: int = ..., exist_ok: bool = ...) -> None: ... +def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: ... if sys.platform != "win32": - def mknod(path: StrOrBytesPath, mode: int = ..., device: int = ..., *, dir_fd: int | None = ...) -> None: ... + def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: ... def major(__device: int) -> int: ... def minor(__device: int) -> int: ... def makedev(__major: int, __minor: int) -> int: ... 
def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only -def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = ...) -> AnyStr: ... -def remove(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: ... +def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... def removedirs(name: StrOrBytesPath) -> None: ... -def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = ..., dst_dir_fd: int | None = ...) -> None: ... +def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: ... def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... -def replace(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = ..., dst_dir_fd: int | None = ...) -> None: ... -def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def replace( + src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None +) -> None: ... +def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... class _ScandirIterator(Iterator[DirEntry[AnyStr]], AbstractContextManager[_ScandirIterator[AnyStr]]): def __next__(self) -> DirEntry[AnyStr]: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def close(self) -> None: ... @overload -def scandir(path: None = ...) -> _ScandirIterator[str]: ... +def scandir(path: None = None) -> _ScandirIterator[str]: ... @overload def scandir(path: int) -> _ScandirIterator[str]: ... @overload def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... -def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> stat_result: ... +def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: ... if sys.platform != "win32": def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only -def symlink(src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = ..., *, dir_fd: int | None = ...) -> None: ... +def symlink( + src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None +) -> None: ... if sys.platform != "win32": def sync() -> None: ... # Unix only def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 -def unlink(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... def utime( path: FileDescriptorOrPath, - times: tuple[int, int] | tuple[float, float] | None = ..., + times: tuple[int, int] | tuple[float, float] | None = None, *, ns: tuple[int, int] = ..., - dir_fd: int | None = ..., - follow_symlinks: bool = ..., + dir_fd: int | None = None, + follow_symlinks: bool = True, ) -> None: ... _OnError: TypeAlias = Callable[[OSError], object] def walk( - top: GenericPath[AnyStr], topdown: bool = ..., onerror: _OnError | None = ..., followlinks: bool = ... + top: GenericPath[AnyStr], topdown: bool = True, onerror: _OnError | None = None, followlinks: bool = False ) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... 
if sys.platform != "win32": @overload def fwalk( - top: StrPath = ..., - topdown: bool = ..., - onerror: _OnError | None = ..., + top: StrPath = ".", + topdown: bool = True, + onerror: _OnError | None = None, *, - follow_symlinks: bool = ..., - dir_fd: int | None = ..., + follow_symlinks: bool = False, + dir_fd: int | None = None, ) -> Iterator[tuple[str, list[str], list[str], int]]: ... @overload def fwalk( top: BytesPath, - topdown: bool = ..., - onerror: _OnError | None = ..., + topdown: bool = True, + onerror: _OnError | None = None, *, - follow_symlinks: bool = ..., - dir_fd: int | None = ..., + follow_symlinks: bool = False, + dir_fd: int | None = None, ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... if sys.platform == "linux": - def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... - def listxattr(path: FileDescriptorOrPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... - def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: ... + def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: ... + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... def setxattr( path: FileDescriptorOrPath, attribute: StrOrBytesPath, value: ReadableBuffer, - flags: int = ..., + flags: int = 0, *, - follow_symlinks: bool = ..., + follow_symlinks: bool = True, ) -> None: ... def abort() -> NoReturn: ... @@ -849,7 +854,7 @@ class _wrap_close(_TextIOWrapper): def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ... def close(self) -> int | None: ... # type: ignore[override] -def popen(cmd: str, mode: str = ..., buffering: int = ...) -> _wrap_close: ... +def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig @@ -881,7 +886,7 @@ def times() -> times_result: ... def waitpid(__pid: int, __options: int) -> tuple[int, int]: ... if sys.platform == "win32": - def startfile(path: StrOrBytesPath, operation: str | None = ...) -> None: ... + def startfile(path: StrOrBytesPath, operation: str | None = None) -> None: ... else: def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... @@ -978,7 +983,7 @@ if sys.platform != "win32": def sysconf(__name: str | int) -> int: ... if sys.platform == "linux": - def getrandom(size: int, flags: int = ...) -> bytes: ... + def getrandom(size: int, flags: int = 0) -> bytes: ... def urandom(__size: int) -> bytes: ... @@ -997,7 +1002,7 @@ if sys.version_info >= (3, 8): def __init__(self, path: str | None, cookie: _T, remove_dll_directory: Callable[[_T], object]) -> None: ... def close(self) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def add_dll_directory(path: str) -> _AddedDllDirectory: ... 
if sys.platform == "linux": diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 79c2352a0f85..5220a142fb13 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -81,8 +81,8 @@ class Path(PurePath): @classmethod def cwd(cls: type[Self]) -> Self: ... if sys.version_info >= (3, 10): - def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... - def chmod(self, mode: int, *, follow_symlinks: bool = ...) -> None: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... else: def stat(self) -> stat_result: ... def chmod(self, mode: int) -> None: ... @@ -99,61 +99,61 @@ class Path(PurePath): def iterdir(self: Self) -> Generator[Self, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> stat_result: ... - def mkdir(self, mode: int = ..., parents: bool = ..., exist_ok: bool = ...) -> None: ... + def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... # Adapted from builtins.open # Text mode: always returns a TextIOWrapper # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this. @overload def open( self, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIOWrapper: ... # Unbuffered binary mode: returns a FileIO @overload def open( - self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None ) -> FileIO: ... # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @overload def open( self, mode: OpenBinaryModeUpdating, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedRandom: ... @overload def open( self, mode: OpenBinaryModeWriting, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedWriter: ... @overload def open( self, mode: OpenBinaryModeReading, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedReader: ... # Buffering cannot be determined: fall back to BinaryIO @overload def open( - self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: int = -1, encoding: None = None, errors: None = None, newline: None = None ) -> BinaryIO: ... # Fallback if mode is not specified @overload def open( - self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... + self, mode: str, buffering: int = -1, encoding: str | None = None, errors: str | None = None, newline: str | None = None ) -> IO[Any]: ... 
if sys.platform != "win32": # These methods do "exist" on Windows, but they always raise NotImplementedError, @@ -171,16 +171,16 @@ class Path(PurePath): def rename(self, target: str | PurePath) -> None: ... def replace(self, target: str | PurePath) -> None: ... - def resolve(self: Self, strict: bool = ...) -> Self: ... + def resolve(self: Self, strict: bool = False) -> Self: ... def rglob(self: Self, pattern: str) -> Generator[Self, None, None]: ... def rmdir(self) -> None: ... - def symlink_to(self, target: str | Path, target_is_directory: bool = ...) -> None: ... + def symlink_to(self, target: str | Path, target_is_directory: bool = False) -> None: ... if sys.version_info >= (3, 10): def hardlink_to(self, target: str | Path) -> None: ... - def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ... + def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... if sys.version_info >= (3, 8): - def unlink(self, missing_ok: bool = ...) -> None: ... + def unlink(self, missing_ok: bool = False) -> None: ... else: def unlink(self) -> None: ... @@ -189,15 +189,15 @@ class Path(PurePath): def absolute(self: Self) -> Self: ... def expanduser(self: Self) -> Self: ... def read_bytes(self) -> bytes: ... - def read_text(self, encoding: str | None = ..., errors: str | None = ...) -> str: ... + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... def samefile(self, other_path: StrPath) -> bool: ... def write_bytes(self, data: ReadableBuffer) -> int: ... if sys.version_info >= (3, 10): def write_text( - self, data: str, encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... + self, data: str, encoding: str | None = None, errors: str | None = None, newline: str | None = None ) -> int: ... else: - def write_text(self, data: str, encoding: str | None = ..., errors: str | None = ...) -> int: ... + def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: ... if sys.version_info >= (3, 8) and sys.version_info < (3, 12): def link_to(self, target: StrOrBytesPath) -> None: ... if sys.version_info >= (3, 12): diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi index 6e95dcff6ee2..a2b6636d8665 100644 --- a/mypy/typeshed/stdlib/pdb.pyi +++ b/mypy/typeshed/stdlib/pdb.pyi @@ -18,12 +18,12 @@ line_prefix: str # undocumented class Restart(Exception): ... -def run(statement: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... -def runeval(expression: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> Any: ... +def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... +def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ... def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... -def set_trace(*, header: str | None = ...) -> None: ... -def post_mortem(t: TracebackType | None = ...) -> None: ... +def set_trace(*, header: str | None = None) -> None: ... +def post_mortem(t: TracebackType | None = None) -> None: ... def pm() -> None: ... 
class Pdb(Bdb, Cmd): @@ -47,12 +47,12 @@ class Pdb(Bdb, Cmd): curframe_locals: Mapping[str, Any] def __init__( self, - completekey: str = ..., - stdin: IO[str] | None = ..., - stdout: IO[str] | None = ..., - skip: Iterable[str] | None = ..., - nosigint: bool = ..., - readrc: bool = ..., + completekey: str = "tab", + stdin: IO[str] | None = None, + stdout: IO[str] | None = None, + skip: Iterable[str] | None = None, + nosigint: bool = False, + readrc: bool = True, ) -> None: ... def forget(self) -> None: ... def setup(self, f: FrameType | None, tb: TracebackType | None) -> None: ... @@ -66,7 +66,7 @@ class Pdb(Bdb, Cmd): def checkline(self, filename: str, lineno: int) -> int: ... def _getval(self, arg: str) -> object: ... def print_stack_trace(self) -> None: ... - def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = ...) -> None: ... + def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ... def lookupmodule(self, filename: str) -> str | None: ... if sys.version_info < (3, 11): def _runscript(self, filename: str) -> None: ... @@ -127,9 +127,9 @@ class Pdb(Bdb, Cmd): def message(self, msg: str) -> None: ... def error(self, msg: str) -> None: ... def _select_frame(self, number: int) -> None: ... - def _getval_except(self, arg: str, frame: FrameType | None = ...) -> object: ... + def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... def _print_lines( - self, lines: Sequence[str], start: int, breaks: Sequence[int] = ..., frame: FrameType | None = ... + self, lines: Sequence[str], start: int, breaks: Sequence[int] = ..., frame: FrameType | None = None ) -> None: ... def _cmdloop(self) -> None: ... def do_display(self, arg: str) -> bool | None: ... diff --git a/mypy/typeshed/stdlib/pickle.pyi b/mypy/typeshed/stdlib/pickle.pyi index f393452069a3..dc098cae97b7 100644 --- a/mypy/typeshed/stdlib/pickle.pyi +++ b/mypy/typeshed/stdlib/pickle.pyi @@ -107,36 +107,36 @@ if sys.version_info >= (3, 8): def dump( obj: Any, file: SupportsWrite[bytes], - protocol: int | None = ..., + protocol: int | None = None, *, - fix_imports: bool = ..., - buffer_callback: _BufferCallback = ..., + fix_imports: bool = True, + buffer_callback: _BufferCallback = None, ) -> None: ... def dumps( - obj: Any, protocol: int | None = ..., *, fix_imports: bool = ..., buffer_callback: _BufferCallback = ... + obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None ) -> bytes: ... def load( file: _ReadableFileobj, *, - fix_imports: bool = ..., - encoding: str = ..., - errors: str = ..., + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", buffers: Iterable[Any] | None = ..., ) -> Any: ... def loads( __data: ReadableBuffer, *, - fix_imports: bool = ..., - encoding: str = ..., - errors: str = ..., + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", buffers: Iterable[Any] | None = ..., ) -> Any: ... else: - def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... - def dumps(obj: Any, protocol: int | None = ..., *, fix_imports: bool = ...) -> bytes: ... - def load(file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... - def loads(data: ReadableBuffer, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... 
+ def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None, *, fix_imports: bool = True) -> None: ... + def dumps(obj: Any, protocol: int | None = None, *, fix_imports: bool = True) -> bytes: ... + def load(file: _ReadableFileobj, *, fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict") -> Any: ... + def loads(data: ReadableBuffer, *, fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict") -> Any: ... class PickleError(Exception): ... class PicklingError(PickleError): ... diff --git a/mypy/typeshed/stdlib/pickletools.pyi b/mypy/typeshed/stdlib/pickletools.pyi index 2f0d5f12f8a3..542172814926 100644 --- a/mypy/typeshed/stdlib/pickletools.pyi +++ b/mypy/typeshed/stdlib/pickletools.pyi @@ -40,7 +40,7 @@ def read_uint8(f: IO[bytes]) -> int: ... uint8: ArgumentDescriptor -def read_stringnl(f: IO[bytes], decode: bool = ..., stripquotes: bool = ...) -> bytes | str: ... +def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: ... stringnl: ArgumentDescriptor @@ -160,8 +160,8 @@ def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... def dis( pickle: bytes | bytearray | IO[bytes], - out: IO[str] | None = ..., - memo: MutableMapping[int, Any] | None = ..., - indentlevel: int = ..., - annotate: int = ..., + out: IO[str] | None = None, + memo: MutableMapping[int, Any] | None = None, + indentlevel: int = 4, + annotate: int = 0, ) -> None: ... diff --git a/mypy/typeshed/stdlib/pkgutil.pyi b/mypy/typeshed/stdlib/pkgutil.pyi index f91ab78ff35d..f9808c9e5de8 100644 --- a/mypy/typeshed/stdlib/pkgutil.pyi +++ b/mypy/typeshed/stdlib/pkgutil.pyi @@ -29,7 +29,7 @@ class ModuleInfo(NamedTuple): def extend_path(path: _PathT, name: str) -> _PathT: ... class ImpImporter: - def __init__(self, path: str | None = ...) -> None: ... + def __init__(self, path: str | None = None) -> None: ... class ImpLoader: def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... @@ -37,11 +37,11 @@ class ImpLoader: def find_loader(fullname: str) -> Loader | None: ... def get_importer(path_item: str) -> PathEntryFinder | None: ... def get_loader(module_or_name: str) -> Loader | None: ... -def iter_importers(fullname: str = ...) -> Iterator[MetaPathFinder | PathEntryFinder]: ... -def iter_modules(path: Iterable[str] | None = ..., prefix: str = ...) -> Iterator[ModuleInfo]: ... +def iter_importers(fullname: str = "") -> Iterator[MetaPathFinder | PathEntryFinder]: ... +def iter_modules(path: Iterable[str] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented def walk_packages( - path: Iterable[str] | None = ..., prefix: str = ..., onerror: Callable[[str], object] | None = ... + path: Iterable[str] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None ) -> Iterator[ModuleInfo]: ... def get_data(package: str, resource: str) -> bytes | None: ... diff --git a/mypy/typeshed/stdlib/platform.pyi b/mypy/typeshed/stdlib/platform.pyi index 765a7a5ea5f9..291f302b4c7d 100644 --- a/mypy/typeshed/stdlib/platform.pyi +++ b/mypy/typeshed/stdlib/platform.pyi @@ -7,37 +7,39 @@ if sys.version_info < (3, 8): from typing import NamedTuple if sys.version_info >= (3, 8): - def libc_ver(executable: str | None = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> tuple[str, str]: ... 
+ def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: ... else: - def libc_ver(executable: str = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> tuple[str, str]: ... + def libc_ver( + executable: str = sys.executable, lib: str = "", version: str = "", chunksize: int = 16384 + ) -> tuple[str, str]: ... if sys.version_info < (3, 8): def linux_distribution( - distname: str = ..., - version: str = ..., - id: str = ..., + distname: str = "", + version: str = "", + id: str = "", supported_dists: tuple[str, ...] = ..., full_distribution_name: bool = ..., ) -> tuple[str, str, str]: ... def dist( - distname: str = ..., version: str = ..., id: str = ..., supported_dists: tuple[str, ...] = ... + distname: str = "", version: str = "", id: str = "", supported_dists: tuple[str, ...] = ... ) -> tuple[str, str, str]: ... -def win32_ver(release: str = ..., version: str = ..., csd: str = ..., ptype: str = ...) -> tuple[str, str, str, str]: ... +def win32_ver(release: str = "", version: str = "", csd: str = "", ptype: str = "") -> tuple[str, str, str, str]: ... if sys.version_info >= (3, 8): def win32_edition() -> str: ... def win32_is_iot() -> bool: ... def mac_ver( - release: str = ..., versioninfo: tuple[str, str, str] = ..., machine: str = ... + release: str = "", versioninfo: tuple[str, str, str] = ..., machine: str = "" ) -> tuple[str, tuple[str, str, str], str]: ... def java_ver( - release: str = ..., vendor: str = ..., vminfo: tuple[str, str, str] = ..., osinfo: tuple[str, str, str] = ... + release: str = "", vendor: str = "", vminfo: tuple[str, str, str] = ..., osinfo: tuple[str, str, str] = ... ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... -def architecture(executable: str = ..., bits: str = ..., linkage: str = ...) -> tuple[str, str]: ... +def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... class uname_result(NamedTuple): system: str diff --git a/mypy/typeshed/stdlib/plistlib.pyi b/mypy/typeshed/stdlib/plistlib.pyi index 4ec9cbd5a31c..54ce3dc61abc 100644 --- a/mypy/typeshed/stdlib/plistlib.pyi +++ b/mypy/typeshed/stdlib/plistlib.pyi @@ -47,24 +47,24 @@ FMT_XML = PlistFormat.FMT_XML FMT_BINARY = PlistFormat.FMT_BINARY if sys.version_info >= (3, 9): - def load(fp: IO[bytes], *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... def loads( - value: ReadableBuffer, *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ... + value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... ) -> Any: ... else: def load( fp: IO[bytes], *, - fmt: PlistFormat | None = ..., - use_builtin_types: bool = ..., + fmt: PlistFormat | None = None, + use_builtin_types: bool = True, dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... def loads( value: ReadableBuffer, *, - fmt: PlistFormat | None = ..., - use_builtin_types: bool = ..., + fmt: PlistFormat | None = None, + use_builtin_types: bool = True, dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... 
@@ -73,15 +73,15 @@ def dump( fp: IO[bytes], *, fmt: PlistFormat = ..., - sort_keys: bool = ..., - skipkeys: bool = ..., + sort_keys: bool = True, + skipkeys: bool = False, ) -> None: ... def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, fmt: PlistFormat = ..., - skipkeys: bool = ..., - sort_keys: bool = ..., + skipkeys: bool = False, + sort_keys: bool = True, ) -> bytes: ... if sys.version_info < (3, 9): @@ -104,4 +104,4 @@ if sys.version_info >= (3, 8): def __eq__(self, other: object) -> bool: ... class InvalidFileException(ValueError): - def __init__(self, message: str = ...) -> None: ... + def __init__(self, message: str = "Invalid file") -> None: ... diff --git a/mypy/typeshed/stdlib/poplib.pyi b/mypy/typeshed/stdlib/poplib.pyi index fd7afedaad05..c64e47e8ef72 100644 --- a/mypy/typeshed/stdlib/poplib.pyi +++ b/mypy/typeshed/stdlib/poplib.pyi @@ -25,13 +25,13 @@ class POP3: sock: socket.socket file: BinaryIO welcome: bytes - def __init__(self, host: str, port: int = ..., timeout: float = ...) -> None: ... + def __init__(self, host: str, port: int = 110, timeout: float = ...) -> None: ... def getwelcome(self) -> bytes: ... def set_debuglevel(self, level: int) -> None: ... def user(self, user: str) -> bytes: ... def pass_(self, pswd: str) -> bytes: ... def stat(self) -> tuple[int, int]: ... - def list(self, which: Any | None = ...) -> _LongResp: ... + def list(self, which: Any | None = None) -> _LongResp: ... def retr(self, which: Any) -> _LongResp: ... def dele(self, which: Any) -> bytes: ... def noop(self) -> bytes: ... @@ -48,17 +48,17 @@ class POP3: def uidl(self, which: Any) -> bytes: ... def utf8(self) -> bytes: ... def capa(self) -> dict[str, _list[str]]: ... - def stls(self, context: ssl.SSLContext | None = ...) -> bytes: ... + def stls(self, context: ssl.SSLContext | None = None) -> bytes: ... class POP3_SSL(POP3): def __init__( self, host: str, - port: int = ..., - keyfile: str | None = ..., - certfile: str | None = ..., + port: int = 995, + keyfile: str | None = None, + certfile: str | None = None, timeout: float = ..., - context: ssl.SSLContext | None = ..., + context: ssl.SSLContext | None = None, ) -> None: ... # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored - def stls(self, context: Any = ..., keyfile: Any = ..., certfile: Any = ...) -> NoReturn: ... + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... diff --git a/mypy/typeshed/stdlib/posixpath.pyi b/mypy/typeshed/stdlib/posixpath.pyi index ff9c2482ace5..1945190be5f8 100644 --- a/mypy/typeshed/stdlib/posixpath.pyi +++ b/mypy/typeshed/stdlib/posixpath.pyi @@ -118,9 +118,9 @@ def join(__a: BytesPath, *paths: BytesPath) -> bytes: ... if sys.version_info >= (3, 10): @overload - def realpath(filename: PathLike[AnyStr], *, strict: bool = ...) -> AnyStr: ... + def realpath(filename: PathLike[AnyStr], *, strict: bool = False) -> AnyStr: ... @overload - def realpath(filename: AnyStr, *, strict: bool = ...) -> AnyStr: ... + def realpath(filename: AnyStr, *, strict: bool = False) -> AnyStr: ... else: @overload @@ -129,11 +129,11 @@ else: def realpath(filename: AnyStr) -> AnyStr: ... @overload -def relpath(path: LiteralString, start: LiteralString | None = ...) -> LiteralString: ... +def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: ... 
@overload -def relpath(path: BytesPath, start: BytesPath | None = ...) -> bytes: ... +def relpath(path: BytesPath, start: BytesPath | None = None) -> bytes: ... @overload -def relpath(path: StrPath, start: StrPath | None = ...) -> str: ... +def relpath(path: StrPath, start: StrPath | None = None) -> str: ... @overload def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload diff --git a/mypy/typeshed/stdlib/pprint.pyi b/mypy/typeshed/stdlib/pprint.pyi index 0addc8f538b2..5a909c69b077 100644 --- a/mypy/typeshed/stdlib/pprint.pyi +++ b/mypy/typeshed/stdlib/pprint.pyi @@ -9,28 +9,28 @@ else: if sys.version_info >= (3, 10): def pformat( object: object, - indent: int = ..., - width: int = ..., - depth: int | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., - underscore_numbers: bool = ..., + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, ) -> str: ... elif sys.version_info >= (3, 8): def pformat( object: object, - indent: int = ..., - width: int = ..., - depth: int | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., + compact: bool = False, + sort_dicts: bool = True, ) -> str: ... else: - def pformat(object: object, indent: int = ..., width: int = ..., depth: int | None = ..., *, compact: bool = ...) -> str: ... + def pformat(object: object, indent: int = 1, width: int = 80, depth: int | None = None, *, compact: bool = False) -> str: ... if sys.version_info >= (3, 10): def pp( @@ -41,7 +41,7 @@ if sys.version_info >= (3, 10): depth: int | None = ..., *, compact: bool = ..., - sort_dicts: bool = ..., + sort_dicts: bool = False, underscore_numbers: bool = ..., ) -> None: ... @@ -54,43 +54,43 @@ elif sys.version_info >= (3, 8): depth: int | None = ..., *, compact: bool = ..., - sort_dicts: bool = ..., + sort_dicts: bool = False, ) -> None: ... if sys.version_info >= (3, 10): def pprint( object: object, - stream: IO[str] | None = ..., - indent: int = ..., - width: int = ..., - depth: int | None = ..., + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., - underscore_numbers: bool = ..., + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, ) -> None: ... elif sys.version_info >= (3, 8): def pprint( object: object, - stream: IO[str] | None = ..., - indent: int = ..., - width: int = ..., - depth: int | None = ..., + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., + compact: bool = False, + sort_dicts: bool = True, ) -> None: ... else: def pprint( object: object, - stream: IO[str] | None = ..., - indent: int = ..., - width: int = ..., - depth: int | None = ..., + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., + compact: bool = False, ) -> None: ... def isreadable(object: object) -> bool: ... 
@@ -101,35 +101,35 @@ class PrettyPrinter: if sys.version_info >= (3, 10): def __init__( self, - indent: int = ..., - width: int = ..., - depth: int | None = ..., - stream: IO[str] | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., - underscore_numbers: bool = ..., + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, ) -> None: ... elif sys.version_info >= (3, 8): def __init__( self, - indent: int = ..., - width: int = ..., - depth: int | None = ..., - stream: IO[str] | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., + compact: bool = False, + sort_dicts: bool = True, ) -> None: ... else: def __init__( self, - indent: int = ..., - width: int = ..., - depth: int | None = ..., - stream: IO[str] | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, *, - compact: bool = ..., + compact: bool = False, ) -> None: ... def pformat(self, object: object) -> str: ... diff --git a/mypy/typeshed/stdlib/profile.pyi b/mypy/typeshed/stdlib/profile.pyi index 4b3f832d3224..8d6e9b220587 100644 --- a/mypy/typeshed/stdlib/profile.pyi +++ b/mypy/typeshed/stdlib/profile.pyi @@ -5,9 +5,9 @@ from typing_extensions import ParamSpec, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = ..., sort: str | int = ...) -> None: ... +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... def runctx( - statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = ..., sort: str | int = ... + statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: ... _T = TypeVar("_T") @@ -17,15 +17,15 @@ _Label: TypeAlias = tuple[str, int, str] class Profile: bias: int stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented - def __init__(self, timer: Callable[[], float] | None = ..., bias: int | None = ...) -> None: ... + def __init__(self, timer: Callable[[], float] | None = None, bias: int | None = None) -> None: ... def set_cmd(self, cmd: str) -> None: ... def simulate_call(self, name: str) -> None: ... def simulate_cmd_complete(self) -> None: ... - def print_stats(self, sort: str | int = ...) -> None: ... + def print_stats(self, sort: str | int = -1) -> None: ... def dump_stats(self, file: StrOrBytesPath) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... def run(self: Self, cmd: str) -> Self: ... def runctx(self: Self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... - def calibrate(self, m: int, verbose: int = ...) -> float: ... + def calibrate(self, m: int, verbose: int = 0) -> float: ... diff --git a/mypy/typeshed/stdlib/pstats.pyi b/mypy/typeshed/stdlib/pstats.pyi index 10d817b59630..f4f331934565 100644 --- a/mypy/typeshed/stdlib/pstats.pyi +++ b/mypy/typeshed/stdlib/pstats.pyi @@ -50,7 +50,7 @@ class Stats: self: Self, __arg: None | str | Profile | _cProfile = ..., *args: None | str | Profile | _cProfile | Self, - stream: IO[Any] | None = ..., + stream: IO[Any] | None = None, ) -> None: ... 
def init(self, arg: None | str | Profile | _cProfile) -> None: ... def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... @@ -74,6 +74,6 @@ class Stats: def print_callees(self: Self, *amount: _Selector) -> Self: ... def print_callers(self: Self, *amount: _Selector) -> Self: ... def print_call_heading(self, name_size: int, column_title: str) -> None: ... - def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = ...) -> None: ... + def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = "->") -> None: ... def print_title(self) -> None: ... def print_line(self, func: str) -> None: ... diff --git a/mypy/typeshed/stdlib/py_compile.pyi b/mypy/typeshed/stdlib/py_compile.pyi index 1e9b6c2cb209..48f1d7dc3e70 100644 --- a/mypy/typeshed/stdlib/py_compile.pyi +++ b/mypy/typeshed/stdlib/py_compile.pyi @@ -9,7 +9,7 @@ class PyCompileError(Exception): exc_value: BaseException file: str msg: str - def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = ...) -> None: ... + def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... class PycInvalidationMode(enum.Enum): TIMESTAMP: int @@ -21,26 +21,26 @@ def _get_default_invalidation_mode() -> PycInvalidationMode: ... if sys.version_info >= (3, 8): def compile( file: AnyStr, - cfile: AnyStr | None = ..., - dfile: AnyStr | None = ..., - doraise: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., - quiet: int = ..., + cfile: AnyStr | None = None, + dfile: AnyStr | None = None, + doraise: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + quiet: int = 0, ) -> AnyStr | None: ... else: def compile( file: AnyStr, - cfile: AnyStr | None = ..., - dfile: AnyStr | None = ..., - doraise: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + cfile: AnyStr | None = None, + dfile: AnyStr | None = None, + doraise: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, ) -> AnyStr | None: ... if sys.version_info >= (3, 10): def main() -> None: ... else: - def main(args: list[str] | None = ...) -> int: ... + def main(args: list[str] | None = None) -> int: ... diff --git a/mypy/typeshed/stdlib/pyclbr.pyi b/mypy/typeshed/stdlib/pyclbr.pyi index ab19b44d7d79..38658a03139c 100644 --- a/mypy/typeshed/stdlib/pyclbr.pyi +++ b/mypy/typeshed/stdlib/pyclbr.pyi @@ -25,13 +25,13 @@ class Class: super_: list[Class | str] | None, file: str, lineno: int, - parent: Class | None = ..., + parent: Class | None = None, *, - end_lineno: int | None = ..., + end_lineno: int | None = None, ) -> None: ... else: def __init__( - self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = ... + self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = None ) -> None: ... class Function: @@ -54,13 +54,13 @@ class Function: name: str, file: str, lineno: int, - parent: Function | Class | None = ..., - is_async: bool = ..., + parent: Function | Class | None = None, + is_async: bool = False, *, - end_lineno: int | None = ..., + end_lineno: int | None = None, ) -> None: ... else: - def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = ...) -> None: ... 
+ def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = None) -> None: ... -def readmodule(module: str, path: Sequence[str] | None = ...) -> dict[str, Class]: ... -def readmodule_ex(module: str, path: Sequence[str] | None = ...) -> dict[str, Class | Function | list[str]]: ... +def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: ... +def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: ... diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi index 0dd2739797f9..9bcd8659fc8c 100644 --- a/mypy/typeshed/stdlib/pydoc.pyi +++ b/mypy/typeshed/stdlib/pydoc.pyi @@ -26,7 +26,7 @@ def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... def cram(text: str, maxlen: int) -> str: ... def stripid(text: str) -> str: ... def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... -def visiblename(name: str, all: Container[str] | None = ..., obj: object | None = ...) -> bool: ... +def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: ... def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... def ispackage(path: str) -> bool: ... def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... @@ -44,20 +44,20 @@ def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, Modu class Doc: PYTHONDOCS: str - def document(self, object: object, name: str | None = ..., *args: Any) -> str: ... - def fail(self, object: object, name: str | None = ..., *args: Any) -> NoReturn: ... + def document(self, object: object, name: str | None = None, *args: Any) -> str: ... + def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: ... @abstractmethod - def docmodule(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docclass(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docclass(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docroutine(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docother(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docother(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docproperty(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docdata(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docdata(self, object: object, name: str | None = None, *args: Any) -> str: ... def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... class HTMLRepr(Repr): @@ -75,32 +75,32 @@ class HTMLDoc(Doc): escape = _repr_instance.escape def page(self, title: str, contents: str) -> str: ... if sys.version_info >= (3, 11): - def heading(self, title: str, extras: str = ...) -> str: ... + def heading(self, title: str, extras: str = "") -> str: ... def section( self, title: str, cls: str, contents: str, - width: int = ..., - prelude: str = ..., - marginalia: str | None = ..., - gap: str = ..., + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", ) -> str: ... 
def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: ... else: - def heading(self, title: str, fgcol: str, bgcol: str, extras: str = ...) -> str: ... + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: ... def section( self, title: str, fgcol: str, bgcol: str, contents: str, - width: int = ..., - prelude: str = ..., - marginalia: str | None = ..., - gap: str = ..., + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", ) -> str: ... - def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = ...) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: ... def bigsection(self, title: str, *args: Any) -> str: ... def preformat(self, text: str) -> str: ... @@ -112,20 +112,20 @@ class HTMLDoc(Doc): def markup( self, text: str, - escape: Callable[[str], str] | None = ..., + escape: Callable[[str], str] | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., ) -> str: ... def formattree( - self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ... + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None ) -> str: ... - def docmodule(self, object: object, name: str | None = ..., mod: str | None = ..., *ignored: Any) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Any) -> str: ... def docclass( self, object: object, - name: str | None = ..., - mod: str | None = ..., + name: str | None = None, + mod: str | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., *ignored: Any, @@ -134,17 +134,17 @@ class HTMLDoc(Doc): def docroutine( # type: ignore[override] self, object: object, - name: str | None = ..., - mod: str | None = ..., + name: str | None = None, + mod: str | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., - cl: type | None = ..., + cl: type | None = None, ) -> str: ... - def docproperty(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def docother(self, object: object, name: str | None = ..., mod: Any | None = ..., *ignored: Any) -> str: ... - def docdata(self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = ...) -> str: ... + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Any) -> str: ... + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: ... def filelink(self, url: str, path: str) -> str: ... class TextRepr(Repr): @@ -157,25 +157,25 @@ class TextDoc(Doc): _repr_instance: TextRepr = ... repr = _repr_instance.repr def bold(self, text: str) -> str: ... - def indent(self, text: str, prefix: str = ...) -> str: ... + def indent(self, text: str, prefix: str = " ") -> str: ... def section(self, title: str, contents: str) -> str: ... 
def formattree( - self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ..., prefix: str = ... + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None, prefix: str = "" ) -> str: ... - def docmodule(self, object: object, name: str | None = ..., mod: Any | None = ...) -> str: ... # type: ignore[override] - def docclass(self, object: object, name: str | None = ..., mod: str | None = ..., *ignored: Any) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: ... # type: ignore[override] + def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Any) -> str: ... def formatvalue(self, object: object) -> str: ... - def docroutine(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def docproperty(self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def docdata(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] + def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] def docother( # type: ignore[override] self, object: object, - name: str | None = ..., - mod: str | None = ..., - parent: str | None = ..., - maxlen: int | None = ..., - doc: Any | None = ..., + name: str | None = None, + mod: str | None = None, + parent: str | None = None, + maxlen: int | None = None, + doc: Any | None = None, ) -> str: ... def pager(text: str) -> None: ... @@ -192,16 +192,23 @@ text: TextDoc html: HTMLDoc def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: ... -def render_doc(thing: str | object, title: str = ..., forceload: bool = ..., renderer: Doc | None = ...) -> str: ... -def doc(thing: str | object, title: str = ..., forceload: bool = ..., output: SupportsWrite[str] | None = ...) -> None: ... +def render_doc( + thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None +) -> str: ... +def doc( + thing: str | object, + title: str = "Python Library Documentation: %s", + forceload: bool = ..., + output: SupportsWrite[str] | None = None, +) -> None: ... def writedoc(thing: str | object, forceload: bool = ...) -> None: ... -def writedocs(dir: str, pkgpath: str = ..., done: Any | None = ...) -> None: ... +def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: ... class Helper: keywords: dict[str, str | tuple[str, str]] symbols: dict[str, str] topics: dict[str, str | tuple[str, ...]] - def __init__(self, input: IO[str] | None = ..., output: IO[str] | None = ...) -> None: ... + def __init__(self, input: IO[str] | None = None, output: IO[str] | None = None) -> None: ... @property def input(self) -> IO[str]: ... @property @@ -211,13 +218,13 @@ class Helper: def getline(self, prompt: str) -> str: ... def help(self, request: Any) -> None: ... def intro(self) -> None: ... 
- def list(self, items: _list[str], columns: int = ..., width: int = ...) -> None: ... + def list(self, items: _list[str], columns: int = 4, width: int = 80) -> None: ... def listkeywords(self) -> None: ... def listsymbols(self) -> None: ... def listtopics(self) -> None: ... - def showtopic(self, topic: str, more_xrefs: str = ...) -> None: ... + def showtopic(self, topic: str, more_xrefs: str = "") -> None: ... def showsymbol(self, symbol: str) -> None: ... - def listmodules(self, key: str = ...) -> None: ... + def listmodules(self, key: str = "") -> None: ... help: Helper @@ -226,9 +233,9 @@ class ModuleScanner: def run( self, callback: Callable[[str | None, str, str], object], - key: str | None = ..., - completer: Callable[[], object] | None = ..., - onerror: Callable[[str], object] | None = ..., + key: str | None = None, + completer: Callable[[], object] | None = None, + onerror: Callable[[str], object] | None = None, ) -> None: ... def apropos(key: str) -> None: ... diff --git a/mypy/typeshed/stdlib/pyexpat/__init__.pyi b/mypy/typeshed/stdlib/pyexpat/__init__.pyi index 7e635c58c933..9e1eea08be54 100644 --- a/mypy/typeshed/stdlib/pyexpat/__init__.pyi +++ b/mypy/typeshed/stdlib/pyexpat/__init__.pyi @@ -24,14 +24,14 @@ _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] @final class XMLParserType: - def Parse(self, __data: str | ReadableBuffer, __isfinal: bool = ...) -> int: ... + def Parse(self, __data: str | ReadableBuffer, __isfinal: bool = False) -> int: ... def ParseFile(self, __file: SupportsRead[bytes]) -> int: ... def SetBase(self, __base: str) -> None: ... def GetBase(self) -> str | None: ... def GetInputContext(self) -> bytes | None: ... def ExternalEntityParserCreate(self, __context: str | None, __encoding: str = ...) -> XMLParserType: ... def SetParamEntityParsing(self, __flag: int) -> int: ... - def UseForeignDTD(self, __flag: bool = ...) -> None: ... + def UseForeignDTD(self, __flag: bool = True) -> None: ... @property def intern(self) -> dict[str, str]: ... buffer_size: int @@ -76,5 +76,5 @@ def ErrorString(__code: int) -> str: ... # intern is undocumented def ParserCreate( - encoding: str | None = ..., namespace_separator: str | None = ..., intern: dict[str, Any] | None = ... + encoding: str | None = None, namespace_separator: str | None = None, intern: dict[str, Any] | None = None ) -> XMLParserType: ... diff --git a/mypy/typeshed/stdlib/queue.pyi b/mypy/typeshed/stdlib/queue.pyi index 7ea4beb664c5..3537e445ed97 100644 --- a/mypy/typeshed/stdlib/queue.pyi +++ b/mypy/typeshed/stdlib/queue.pyi @@ -23,14 +23,14 @@ class Queue(Generic[_T]): # Despite the fact that `queue` has `deque` type, # we treat it as `Any` to allow different implementations in subtypes. queue: Any # undocumented - def __init__(self, maxsize: int = ...) -> None: ... + def __init__(self, maxsize: int = 0) -> None: ... def _init(self, maxsize: int) -> None: ... def empty(self) -> bool: ... def full(self) -> bool: ... - def get(self, block: bool = ..., timeout: float | None = ...) -> _T: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... def get_nowait(self) -> _T: ... def _get(self) -> _T: ... - def put(self, item: _T, block: bool = ..., timeout: float | None = ...) -> None: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def _put(self, item: _T) -> None: ... def join(self) -> None: ... 
@@ -49,9 +49,9 @@ class LifoQueue(Queue[_T]): class SimpleQueue(Generic[_T]): def __init__(self) -> None: ... def empty(self) -> bool: ... - def get(self, block: bool = ..., timeout: float | None = ...) -> _T: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... def get_nowait(self) -> _T: ... - def put(self, item: _T, block: bool = ..., timeout: float | None = ...) -> None: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def qsize(self) -> int: ... if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/quopri.pyi b/mypy/typeshed/stdlib/quopri.pyi index 549413226bdb..336f733f64c0 100644 --- a/mypy/typeshed/stdlib/quopri.pyi +++ b/mypy/typeshed/stdlib/quopri.pyi @@ -5,7 +5,7 @@ __all__ = ["encode", "decode", "encodestring", "decodestring"] class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... -def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: int = ...) -> None: ... -def encodestring(s: ReadableBuffer, quotetabs: int = ..., header: int = ...) -> bytes: ... -def decode(input: _Input, output: SupportsWrite[bytes], header: int = ...) -> None: ... -def decodestring(s: str | ReadableBuffer, header: int = ...) -> bytes: ... +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: int = False) -> None: ... +def encodestring(s: ReadableBuffer, quotetabs: int = False, header: int = False) -> bytes: ... +def decode(input: _Input, output: SupportsWrite[bytes], header: int = False) -> None: ... +def decodestring(s: str | ReadableBuffer, header: int = False) -> bytes: ... diff --git a/mypy/typeshed/stdlib/random.pyi b/mypy/typeshed/stdlib/random.pyi index a2a1d956e78f..4849878691f5 100644 --- a/mypy/typeshed/stdlib/random.pyi +++ b/mypy/typeshed/stdlib/random.pyi @@ -39,18 +39,18 @@ _T = TypeVar("_T") class Random(_random.Random): VERSION: ClassVar[int] - def __init__(self, x: Any = ...) -> None: ... + def __init__(self, x: Any = None) -> None: ... # Using other `seed` types is deprecated since 3.9 and removed in 3.11 # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit # int better documents conventional usage of random.seed. if sys.version_info >= (3, 9): - def seed(self, a: int | float | str | bytes | bytearray | None = ..., version: int = ...) -> None: ... # type: ignore[override] # noqa: Y041 + def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 else: - def seed(self, a: Any = ..., version: int = ...) -> None: ... + def seed(self, a: Any = None, version: int = 2) -> None: ... def getstate(self) -> tuple[Any, ...]: ... def setstate(self, state: tuple[Any, ...]) -> None: ... - def randrange(self, start: int, stop: int | None = ..., step: int = ...) -> int: ... + def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: ... def randint(self, a: int, b: int) -> int: ... if sys.version_info >= (3, 9): def randbytes(self, n: int) -> bytes: ... @@ -59,32 +59,32 @@ class Random(_random.Random): def choices( self, population: SupportsLenAndGetItem[_T], - weights: Sequence[float | Fraction] | None = ..., + weights: Sequence[float | Fraction] | None = None, *, - cum_weights: Sequence[float | Fraction] | None = ..., - k: int = ..., + cum_weights: Sequence[float | Fraction] | None = None, + k: int = 1, ) -> list[_T]: ... 
if sys.version_info >= (3, 11): def shuffle(self, x: MutableSequence[Any]) -> None: ... else: - def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = ...) -> None: ... + def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... if sys.version_info >= (3, 11): - def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = ...) -> list[_T]: ... + def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: ... elif sys.version_info >= (3, 9): def sample( - self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = ... + self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None ) -> list[_T]: ... else: def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int) -> list[_T]: ... def uniform(self, a: float, b: float) -> float: ... - def triangular(self, low: float = ..., high: float = ..., mode: float | None = ...) -> float: ... + def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: ... def betavariate(self, alpha: float, beta: float) -> float: ... def expovariate(self, lambd: float) -> float: ... def gammavariate(self, alpha: float, beta: float) -> float: ... if sys.version_info >= (3, 11): - def gauss(self, mu: float = ..., sigma: float = ...) -> float: ... - def normalvariate(self, mu: float = ..., sigma: float = ...) -> float: ... + def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... + def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... else: def gauss(self, mu: float, sigma: float) -> float: ... def normalvariate(self, mu: float, sigma: float) -> float: ... diff --git a/mypy/typeshed/stdlib/re.pyi b/mypy/typeshed/stdlib/re.pyi index 3e52d209eb87..f45ac7383e5d 100644 --- a/mypy/typeshed/stdlib/re.pyi +++ b/mypy/typeshed/stdlib/re.pyi @@ -67,7 +67,9 @@ class Match(Generic[AnyStr]): @overload def expand(self: Match[str], template: str) -> str: ... @overload - def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... + def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... # type: ignore[misc] + @overload + def expand(self, template: AnyStr) -> AnyStr: ... # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload def group(self, __group: Literal[0] = ...) -> AnyStr: ... @@ -87,9 +89,9 @@ class Match(Generic[AnyStr]): def groupdict(self) -> dict[str, AnyStr | Any]: ... @overload def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... - def start(self, __group: int | str = ...) -> int: ... - def end(self, __group: int | str = ...) -> int: ... - def span(self, __group: int | str = ...) -> tuple[int, int]: ... + def start(self, __group: int | str = 0) -> int: ... + def end(self, __group: int | str = 0) -> int: ... + def span(self, __group: int | str = 0) -> tuple[int, int]: ... @property def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. @@ -113,48 +115,64 @@ class Pattern(Generic[AnyStr]): @property def pattern(self) -> AnyStr: ... @overload - def search(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... 
+ @overload + def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] + @overload + def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] @overload - def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def match(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... @overload - def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] @overload - def fullmatch(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | Any]: ... @overload - def split(self: Pattern[str], string: str, maxsplit: int = ...) -> list[str | Any]: ... + def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | Any]: ... @overload - def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = ...) -> list[bytes | Any]: ... + def split(self, string: AnyStr, maxsplit: int = 0) -> list[AnyStr | Any]: ... # return type depends on the number of groups in the pattern @overload - def findall(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> list[Any]: ... + def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... @overload - def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> list[Any]: ... + def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... @overload - def finditer(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Iterator[Match[str]]: ... + def findall(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> list[AnyStr]: ... @overload - def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Iterator[Match[bytes]]: ... + def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ... @overload - def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ...) -> str: ... + def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ... 
# type: ignore[misc] @overload - def sub( + def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ... + @overload + def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ... + @overload + def sub( # type: ignore[misc] self: Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., + count: int = 0, ) -> bytes: ... @overload - def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ...) -> tuple[str, int]: ... + def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ... @overload - def subn( + def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ... + @overload + def subn( # type: ignore[misc] self: Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., + count: int = 0, ) -> tuple[bytes, int]: ... + @overload + def subn(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> tuple[AnyStr, int]: ... def __copy__(self) -> Pattern[AnyStr]: ... def __deepcopy__(self, __memo: Any) -> Pattern[AnyStr]: ... if sys.version_info >= (3, 9): @@ -212,59 +230,59 @@ _FlagsType: TypeAlias = int | RegexFlag # pattern arguments do *not* accept arbitrary buffers such as bytearray, # because the pattern must be hashable. @overload -def compile(pattern: AnyStr, flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: ... @overload -def compile(pattern: Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... @overload -def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload -def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload -def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload -def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def split(pattern: str | Pattern[str], string: str, maxsplit: int = ..., flags: _FlagsType = ...) -> list[str | Any]: ... 
+def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | Any]: ... @overload def split( - pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = ..., flags: _FlagsType = ... + pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0 ) -> list[bytes | Any]: ... @overload -def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> list[Any]: ... +def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: ... @overload -def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> list[Any]: ... +def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ... @overload -def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Iterator[Match[str]]: ... +def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: ... @overload -def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Iterator[Match[bytes]]: ... +def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ... @overload def sub( - pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ..., flags: _FlagsType = ... + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 ) -> str: ... @overload def sub( pattern: bytes | Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., - flags: _FlagsType = ..., + count: int = 0, + flags: _FlagsType = 0, ) -> bytes: ... @overload def subn( - pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ..., flags: _FlagsType = ... + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 ) -> tuple[str, int]: ... @overload def subn( pattern: bytes | Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., - flags: _FlagsType = ..., + count: int = 0, + flags: _FlagsType = 0, ) -> tuple[bytes, int]: ... def escape(pattern: AnyStr) -> AnyStr: ... def purge() -> None: ... -def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... diff --git a/mypy/typeshed/stdlib/readline.pyi b/mypy/typeshed/stdlib/readline.pyi index ceca2e32f221..14c01a986351 100644 --- a/mypy/typeshed/stdlib/readline.pyi +++ b/mypy/typeshed/stdlib/readline.pyi @@ -8,13 +8,13 @@ if sys.platform != "win32": _CompDisp: TypeAlias = Callable[[str, Sequence[str], int], None] def parse_and_bind(__string: str) -> None: ... - def read_init_file(__filename: StrOrBytesPath | None = ...) -> None: ... + def read_init_file(__filename: StrOrBytesPath | None = None) -> None: ... def get_line_buffer() -> str: ... def insert_text(__string: str) -> None: ... def redisplay() -> None: ... - def read_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... - def write_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... - def append_history_file(__nelements: int, __filename: StrOrBytesPath | None = ...) -> None: ... 
+ def read_history_file(__filename: StrOrBytesPath | None = None) -> None: ... + def write_history_file(__filename: StrOrBytesPath | None = None) -> None: ... + def append_history_file(__nelements: int, __filename: StrOrBytesPath | None = None) -> None: ... def get_history_length() -> int: ... def set_history_length(__length: int) -> None: ... def clear_history() -> None: ... @@ -24,13 +24,13 @@ if sys.platform != "win32": def replace_history_item(__pos: int, __line: str) -> None: ... def add_history(__string: str) -> None: ... def set_auto_history(__enabled: bool) -> None: ... - def set_startup_hook(__function: Callable[[], object] | None = ...) -> None: ... - def set_pre_input_hook(__function: Callable[[], object] | None = ...) -> None: ... - def set_completer(__function: _Completer | None = ...) -> None: ... + def set_startup_hook(__function: Callable[[], object] | None = None) -> None: ... + def set_pre_input_hook(__function: Callable[[], object] | None = None) -> None: ... + def set_completer(__function: _Completer | None = None) -> None: ... def get_completer() -> _Completer | None: ... def get_completion_type() -> int: ... def get_begidx() -> int: ... def get_endidx() -> int: ... def set_completer_delims(__string: str) -> None: ... def get_completer_delims() -> str: ... - def set_completion_display_matches_hook(__function: _CompDisp | None = ...) -> None: ... + def set_completion_display_matches_hook(__function: _CompDisp | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/reprlib.pyi b/mypy/typeshed/stdlib/reprlib.pyi index 9955f12627a3..21c8a5cd4e0c 100644 --- a/mypy/typeshed/stdlib/reprlib.pyi +++ b/mypy/typeshed/stdlib/reprlib.pyi @@ -8,7 +8,7 @@ __all__ = ["Repr", "repr", "recursive_repr"] _ReprFunc: TypeAlias = Callable[[Any], str] -def recursive_repr(fillvalue: str = ...) -> Callable[[_ReprFunc], _ReprFunc]: ... +def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: ... class Repr: maxlevel: int diff --git a/mypy/typeshed/stdlib/rlcompleter.pyi b/mypy/typeshed/stdlib/rlcompleter.pyi index 1840b7cfced7..8d9477e3ee45 100644 --- a/mypy/typeshed/stdlib/rlcompleter.pyi +++ b/mypy/typeshed/stdlib/rlcompleter.pyi @@ -3,7 +3,7 @@ from typing import Any __all__ = ["Completer"] class Completer: - def __init__(self, namespace: dict[str, Any] | None = ...) -> None: ... + def __init__(self, namespace: dict[str, Any] | None = None) -> None: ... def complete(self, text: str, state: int) -> str | None: ... def attr_matches(self, text: str) -> list[str]: ... def global_matches(self, text: str) -> list[str]: ... diff --git a/mypy/typeshed/stdlib/runpy.pyi b/mypy/typeshed/stdlib/runpy.pyi index 256f8dab14e9..7efc194c8c66 100644 --- a/mypy/typeshed/stdlib/runpy.pyi +++ b/mypy/typeshed/stdlib/runpy.pyi @@ -1,4 +1,4 @@ -from _typeshed import Self +from _typeshed import Self, Unused from types import ModuleType from typing import Any @@ -9,15 +9,15 @@ class _TempModule: module: ModuleType def __init__(self, mod_name: str) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... class _ModifiedArgv0: value: Any def __init__(self, value: Any) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def run_module( - mod_name: str, init_globals: dict[str, Any] | None = ..., run_name: str | None = ..., alter_sys: bool = ... 
+ mod_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None, alter_sys: bool = False ) -> dict[str, Any]: ... -def run_path(path_name: str, init_globals: dict[str, Any] | None = ..., run_name: str | None = ...) -> dict[str, Any]: ... +def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: ... diff --git a/mypy/typeshed/stdlib/sched.pyi b/mypy/typeshed/stdlib/sched.pyi index 29c84f951124..a8ec78d68fd2 100644 --- a/mypy/typeshed/stdlib/sched.pyi +++ b/mypy/typeshed/stdlib/sched.pyi @@ -35,7 +35,7 @@ class scheduler: def enter( self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ... ) -> Event: ... - def run(self, blocking: bool = ...) -> float | None: ... + def run(self, blocking: bool = True) -> float | None: ... def cancel(self, event: Event) -> None: ... def empty(self) -> bool: ... @property diff --git a/mypy/typeshed/stdlib/secrets.pyi b/mypy/typeshed/stdlib/secrets.pyi index 99b7c14ebafc..4861b6f09340 100644 --- a/mypy/typeshed/stdlib/secrets.pyi +++ b/mypy/typeshed/stdlib/secrets.pyi @@ -10,6 +10,6 @@ _T = TypeVar("_T") def randbelow(exclusive_upper_bound: int) -> int: ... def randbits(k: int) -> int: ... def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... -def token_bytes(nbytes: int | None = ...) -> bytes: ... -def token_hex(nbytes: int | None = ...) -> str: ... -def token_urlsafe(nbytes: int | None = ...) -> str: ... +def token_bytes(nbytes: int | None = None) -> bytes: ... +def token_hex(nbytes: int | None = None) -> str: ... +def token_urlsafe(nbytes: int | None = None) -> str: ... diff --git a/mypy/typeshed/stdlib/select.pyi b/mypy/typeshed/stdlib/select.pyi index 63989730a7e9..d02651320cf6 100644 --- a/mypy/typeshed/stdlib/select.pyi +++ b/mypy/typeshed/stdlib/select.pyi @@ -27,7 +27,7 @@ class poll: def poll(self, timeout: float | None = ...) -> list[tuple[int, int]]: ... def select( - __rlist: Iterable[Any], __wlist: Iterable[Any], __xlist: Iterable[Any], __timeout: float | None = ... + __rlist: Iterable[Any], __wlist: Iterable[Any], __xlist: Iterable[Any], __timeout: float | None = None ) -> tuple[list[Any], list[Any], list[Any]]: ... error = OSError @@ -58,7 +58,7 @@ if sys.platform != "linux" and sys.platform != "win32": def __init__(self) -> None: ... def close(self) -> None: ... def control( - self, __changelist: Iterable[kevent] | None, __maxevents: int, __timeout: float | None = ... + self, __changelist: Iterable[kevent] | None, __maxevents: int, __timeout: float | None = None ) -> list[kevent]: ... def fileno(self) -> int: ... @classmethod @@ -109,9 +109,9 @@ if sys.platform == "linux": def __enter__(self: Self) -> Self: ... def __exit__( self, - __exc_type: type[BaseException] | None = ..., + __exc_type: type[BaseException] | None = None, __exc_val: BaseException | None = ..., - __exc_tb: TracebackType | None = ..., + __exc_tb: TracebackType | None = None, ) -> None: ... def close(self) -> None: ... closed: bool @@ -119,7 +119,7 @@ if sys.platform == "linux": def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... def unregister(self, fd: FileDescriptorLike) -> None: ... - def poll(self, timeout: float | None = ..., maxevents: int = ...) -> list[tuple[int, int]]: ... + def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: ... 
@classmethod def fromfd(cls, __fd: FileDescriptorLike) -> epoll: ... EPOLLERR: int diff --git a/mypy/typeshed/stdlib/selectors.pyi b/mypy/typeshed/stdlib/selectors.pyi index 95dfaa41a5c0..e15780fadee1 100644 --- a/mypy/typeshed/stdlib/selectors.pyi +++ b/mypy/typeshed/stdlib/selectors.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import FileDescriptor, FileDescriptorLike, Self +from _typeshed import FileDescriptor, FileDescriptorLike, Self, Unused from abc import ABCMeta, abstractmethod from collections.abc import Mapping from typing import Any, NamedTuple @@ -18,38 +18,38 @@ class SelectorKey(NamedTuple): class BaseSelector(metaclass=ABCMeta): @abstractmethod - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... @abstractmethod def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... @abstractmethod - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def close(self) -> None: ... def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... @abstractmethod def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... class SelectSelector(BaseSelector): - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... if sys.platform != "win32": class PollSelector(BaseSelector): - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... if sys.platform == "linux": class EpollSelector(BaseSelector): def fileno(self) -> int: ... - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... 
class DevpollSelector(BaseSelector): @@ -61,13 +61,13 @@ class DevpollSelector(BaseSelector): class KqueueSelector(BaseSelector): def fileno(self) -> int: ... - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class DefaultSelector(BaseSelector): - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... diff --git a/mypy/typeshed/stdlib/shelve.pyi b/mypy/typeshed/stdlib/shelve.pyi index c801ecd3f186..d55e08bffa16 100644 --- a/mypy/typeshed/stdlib/shelve.pyi +++ b/mypy/typeshed/stdlib/shelve.pyi @@ -11,7 +11,7 @@ _VT = TypeVar("_VT") class Shelf(MutableMapping[str, _VT]): def __init__( - self, dict: MutableMapping[bytes, bytes], protocol: int | None = ..., writeback: bool = ..., keyencoding: str = ... + self, dict: MutableMapping[bytes, bytes], protocol: int | None = None, writeback: bool = False, keyencoding: str = "utf-8" ) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... @@ -38,6 +38,6 @@ class BsdDbShelf(Shelf[_VT]): def last(self) -> tuple[str, _VT]: ... class DbfilenameShelf(Shelf[_VT]): - def __init__(self, filename: str, flag: _TFlags = ..., protocol: int | None = ..., writeback: bool = ...) -> None: ... + def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ... -def open(filename: str, flag: _TFlags = ..., protocol: int | None = ..., writeback: bool = ...) -> Shelf[Any]: ... +def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ... diff --git a/mypy/typeshed/stdlib/shlex.pyi b/mypy/typeshed/stdlib/shlex.pyi index f9d660594a5a..9a578d186be8 100644 --- a/mypy/typeshed/stdlib/shlex.pyi +++ b/mypy/typeshed/stdlib/shlex.pyi @@ -8,7 +8,7 @@ if sys.version_info >= (3, 8): else: __all__ = ["shlex", "split", "quote"] -def split(s: str, comments: bool = ..., posix: bool = ...) -> list[str]: ... +def split(s: str, comments: bool = False, posix: bool = True) -> list[str]: ... if sys.version_info >= (3, 8): def join(split_command: Iterable[str]) -> str: ... @@ -34,17 +34,17 @@ class shlex(Iterable[str]): def punctuation_chars(self) -> str: ... def __init__( self, - instream: str | TextIO | None = ..., - infile: str | None = ..., - posix: bool = ..., - punctuation_chars: bool | str = ..., + instream: str | TextIO | None = None, + infile: str | None = None, + posix: bool = False, + punctuation_chars: bool | str = False, ) -> None: ... def get_token(self) -> str: ... def push_token(self, tok: str) -> None: ... def read_token(self) -> str: ... 
def sourcehook(self, newfile: str) -> tuple[str, TextIO]: ... - def push_source(self, newstream: str | TextIO, newfile: str | None = ...) -> None: ... + def push_source(self, newstream: str | TextIO, newfile: str | None = None) -> None: ... def pop_source(self) -> None: ... - def error_leader(self, infile: str | None = ..., lineno: int | None = ...) -> None: ... + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> None: ... def __iter__(self: Self) -> Self: ... def __next__(self) -> str: ... diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index 6dbfbcc06998..0e4f521e5e34 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -47,39 +47,44 @@ class ExecError(OSError): ... class ReadError(OSError): ... class RegistryError(Exception): ... -def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = ...) -> None: ... -def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = ...) -> _StrOrBytesPathT: ... -def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... -def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... +if sys.version_info >= (3, 8): + def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: ... + +else: + def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 16384) -> None: ... + +def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: ... +def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... +def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... @overload -def copy(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... @overload -def copy(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... @overload -def copy2(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy2(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... @overload -def copy2(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy2(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... if sys.version_info >= (3, 8): def copytree( src: StrPath, dst: StrPath, - symlinks: bool = ..., - ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = ..., + symlinks: bool = False, + ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, copy_function: Callable[[str, str], object] = ..., - ignore_dangling_symlinks: bool = ..., - dirs_exist_ok: bool = ..., + ignore_dangling_symlinks: bool = False, + dirs_exist_ok: bool = False, ) -> _PathReturn: ... 
else: def copytree( src: StrPath, dst: StrPath, - symlinks: bool = ..., - ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = ..., + symlinks: bool = False, + ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, copy_function: Callable[[str, str], object] = ..., - ignore_dangling_symlinks: bool = ..., + ignore_dangling_symlinks: bool = False, ) -> _PathReturn: ... _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], Any, Any], object] @@ -124,9 +129,9 @@ def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... # see https://bugs.python.org/issue33140. We keep it here because it's # in __all__. @overload -def chown(path: StrOrBytesPath, user: str | int, group: None = ...) -> None: ... +def chown(path: StrOrBytesPath, user: str | int, group: None = None) -> None: ... @overload -def chown(path: StrOrBytesPath, user: None = ..., *, group: str | int) -> None: ... +def chown(path: StrOrBytesPath, user: None = None, *, group: str | int) -> None: ... @overload def chown(path: StrOrBytesPath, user: None, group: str | int) -> None: ... @overload @@ -134,46 +139,46 @@ def chown(path: StrOrBytesPath, user: str | int, group: str | int) -> None: ... if sys.version_info >= (3, 8): @overload - def which(cmd: _StrPathT, mode: int = ..., path: StrPath | None = ...) -> str | _StrPathT | None: ... + def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: ... @overload - def which(cmd: bytes, mode: int = ..., path: StrPath | None = ...) -> bytes | None: ... + def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... else: - def which(cmd: _StrPathT, mode: int = ..., path: StrPath | None = ...) -> str | _StrPathT | None: ... + def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: ... def make_archive( base_name: str, format: str, - root_dir: StrPath | None = ..., - base_dir: StrPath | None = ..., + root_dir: StrPath | None = None, + base_dir: StrPath | None = None, verbose: bool = ..., dry_run: bool = ..., - owner: str | None = ..., - group: str | None = ..., - logger: Any | None = ..., + owner: str | None = None, + group: str | None = None, + logger: Any | None = None, ) -> str: ... def get_archive_formats() -> list[tuple[str, str]]: ... @overload def register_archive_format( - name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = ... + name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = "" ) -> None: ... @overload def register_archive_format( - name: str, function: Callable[[str, str], object], extra_args: None = ..., description: str = ... + name: str, function: Callable[[str, str], object], extra_args: None = None, description: str = "" ) -> None: ... def unregister_archive_format(name: str) -> None: ... -def unpack_archive(filename: StrPath, extract_dir: StrPath | None = ..., format: str | None = ...) -> None: ... +def unpack_archive(filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None) -> None: ... @overload def register_unpack_format( name: str, extensions: list[str], function: Callable[..., object], extra_args: Sequence[tuple[str, Any]], - description: str = ..., + description: str = "", ) -> None: ... 
@overload def register_unpack_format( - name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = ..., description: str = ... + name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = None, description: str = "" ) -> None: ... def unregister_unpack_format(name: str) -> None: ... def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi index 8e9bd990a2c2..e0d7364c6b4e 100644 --- a/mypy/typeshed/stdlib/signal.pyi +++ b/mypy/typeshed/stdlib/signal.pyi @@ -134,7 +134,7 @@ else: else: def pthread_sigmask(__how: int, __mask: Iterable[int]) -> set[_SIGNUM]: ... - def setitimer(__which: int, __seconds: float, __interval: float = ...) -> tuple[float, float]: ... + def setitimer(__which: int, __seconds: float, __interval: float = 0.0) -> tuple[float, float]: ... def siginterrupt(__signalnum: int, __flag: bool) -> None: ... def sigpending() -> Any: ... if sys.version_info >= (3, 10): # argument changed in 3.10.2 diff --git a/mypy/typeshed/stdlib/site.pyi b/mypy/typeshed/stdlib/site.pyi index 53199db0eaf3..a8c6bcb417f4 100644 --- a/mypy/typeshed/stdlib/site.pyi +++ b/mypy/typeshed/stdlib/site.pyi @@ -9,14 +9,14 @@ USER_BASE: str | None def main() -> None: ... def abs_paths() -> None: ... # undocumented def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: ... # undocumented -def addsitedir(sitedir: str, known_paths: set[str] | None = ...) -> None: ... -def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = ...) -> set[str] | None: ... # undocumented +def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: ... +def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: ... # undocumented def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ... # undocumented def check_enableusersite() -> bool | None: ... # undocumented def enablerlcompleter() -> None: ... # undocumented def execsitecustomize() -> None: ... # undocumented def execusercustomize() -> None: ... # undocumented -def getsitepackages(prefixes: Iterable[str] | None = ...) -> list[str]: ... +def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ... def getuserbase() -> str: ... def getusersitepackages() -> str: ... def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented diff --git a/mypy/typeshed/stdlib/smtpd.pyi b/mypy/typeshed/stdlib/smtpd.pyi index f2de6c155c07..7392bd51627d 100644 --- a/mypy/typeshed/stdlib/smtpd.pyi +++ b/mypy/typeshed/stdlib/smtpd.pyi @@ -41,10 +41,10 @@ class SMTPChannel(asynchat.async_chat): server: SMTPServer, conn: socket.socket, addr: Any, - data_size_limit: int = ..., - map: asyncore._MapType | None = ..., - enable_SMTPUTF8: bool = ..., - decode_data: bool = ..., + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, ) -> None: ... # base asynchat.async_chat.push() accepts bytes def push(self, msg: str) -> None: ... 
# type: ignore[override] @@ -71,10 +71,10 @@ class SMTPServer(asyncore.dispatcher): self, localaddr: _Address, remoteaddr: _Address, - data_size_limit: int = ..., - map: asyncore._MapType | None = ..., - enable_SMTPUTF8: bool = ..., - decode_data: bool = ..., + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, ) -> None: ... def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... def process_message( diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index 9fedd6f316d1..d0d674242bf8 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -89,26 +89,26 @@ class SMTP: local_hostname: str def __init__( self, - host: str = ..., - port: int = ..., - local_hostname: str | None = ..., + host: str = "", + port: int = 0, + local_hostname: str | None = None, timeout: float = ..., - source_address: _SourceAddress | None = ..., + source_address: _SourceAddress | None = None, ) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... - def connect(self, host: str = ..., port: int = ..., source_address: _SourceAddress | None = ...) -> _Reply: ... + def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: ... def send(self, s: ReadableBuffer | str) -> None: ... - def putcmd(self, cmd: str, args: str = ...) -> None: ... + def putcmd(self, cmd: str, args: str = "") -> None: ... def getreply(self) -> _Reply: ... - def docmd(self, cmd: str, args: str = ...) -> _Reply: ... - def helo(self, name: str = ...) -> _Reply: ... - def ehlo(self, name: str = ...) -> _Reply: ... + def docmd(self, cmd: str, args: str = "") -> _Reply: ... + def helo(self, name: str = "") -> _Reply: ... + def ehlo(self, name: str = "") -> _Reply: ... def has_extn(self, opt: str) -> bool: ... - def help(self, args: str = ...) -> bytes: ... + def help(self, args: str = "") -> bytes: ... def rset(self) -> _Reply: ... def noop(self) -> _Reply: ... def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... @@ -120,15 +120,15 @@ class SMTP: def ehlo_or_helo_if_needed(self) -> None: ... user: str password: str - def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = ...) -> _Reply: ... + def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: ... @overload - def auth_cram_md5(self, challenge: None = ...) -> None: ... + def auth_cram_md5(self, challenge: None = None) -> None: ... @overload def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... - def auth_plain(self, challenge: ReadableBuffer | None = ...) -> str: ... - def auth_login(self, challenge: ReadableBuffer | None = ...) -> str: ... - def login(self, user: str, password: str, *, initial_response_ok: bool = ...) -> _Reply: ... - def starttls(self, keyfile: str | None = ..., certfile: str | None = ..., context: SSLContext | None = ...) -> _Reply: ... + def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ... + def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ... + def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ... 
+ def starttls(self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None) -> _Reply: ... def sendmail( self, from_addr: str, @@ -140,8 +140,8 @@ class SMTP: def send_message( self, msg: _Message, - from_addr: str | None = ..., - to_addrs: str | Sequence[str] | None = ..., + from_addr: str | None = None, + to_addrs: str | Sequence[str] | None = None, mail_options: Sequence[str] = ..., rcpt_options: Sequence[str] = ..., ) -> _SendErrs: ... @@ -154,14 +154,14 @@ class SMTP_SSL(SMTP): context: SSLContext def __init__( self, - host: str = ..., - port: int = ..., - local_hostname: str | None = ..., - keyfile: str | None = ..., - certfile: str | None = ..., + host: str = "", + port: int = 0, + local_hostname: str | None = None, + keyfile: str | None = None, + certfile: str | None = None, timeout: float = ..., - source_address: _SourceAddress | None = ..., - context: SSLContext | None = ..., + source_address: _SourceAddress | None = None, + context: SSLContext | None = None, ) -> None: ... LMTP_PORT: int @@ -170,13 +170,17 @@ class LMTP(SMTP): if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - port: int = ..., - local_hostname: str | None = ..., - source_address: _SourceAddress | None = ..., + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, timeout: float = ..., ) -> None: ... else: def __init__( - self, host: str = ..., port: int = ..., local_hostname: str | None = ..., source_address: _SourceAddress | None = ... + self, + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, ) -> None: ... diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index 678bdafb25f0..4481f398867c 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -112,7 +112,7 @@ from _socket import ( setdefaulttimeout as setdefaulttimeout, timeout as timeout, ) -from _typeshed import ReadableBuffer, Self, WriteableBuffer +from _typeshed import ReadableBuffer, Self, Unused, WriteableBuffer from collections.abc import Iterable from enum import IntEnum, IntFlag from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper @@ -655,10 +655,10 @@ class _SendableFile(Protocol): class socket(_socket.socket): def __init__( - self, family: AddressFamily | int = ..., type: SocketKind | int = ..., proto: int = ..., fileno: int | None = ... + self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None ) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def dup(self: Self) -> Self: ... # noqa: F811 def accept(self) -> tuple[socket, _RetAddress]: ... # Note that the makefile's documented windows-specific behavior is not represented @@ -669,39 +669,39 @@ class socket(_socket.socket): mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], buffering: Literal[0], *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> SocketIO: ... 
@overload def makefile( self, mode: Literal["rwb", "rbw", "wrb", "wbr", "brw", "bwr"], - buffering: Literal[-1, 1] | None = ..., + buffering: Literal[-1, 1] | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BufferedRWPair: ... @overload def makefile( self, mode: Literal["rb", "br"], - buffering: Literal[-1, 1] | None = ..., + buffering: Literal[-1, 1] | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BufferedReader: ... @overload def makefile( self, mode: Literal["wb", "bw"], - buffering: Literal[-1, 1] | None = ..., + buffering: Literal[-1, 1] | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BufferedWriter: ... @overload def makefile( @@ -709,21 +709,21 @@ class socket(_socket.socket): mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], buffering: int, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> IOBase: ... @overload def makefile( self, - mode: Literal["r", "w", "rw", "wr", ""] = ..., - buffering: int | None = ..., + mode: Literal["r", "w", "rw", "wr", ""] = "r", + buffering: int | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIOWrapper: ... - def sendfile(self, file: _SendableFile, offset: int = ..., count: int | None = ...) -> int: ... + def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: ... @property def family(self) -> AddressFamily: ... # type: ignore[override] @property @@ -731,25 +731,24 @@ class socket(_socket.socket): def get_inheritable(self) -> bool: ... def set_inheritable(self, inheritable: bool) -> None: ... -def fromfd(fd: _FD, family: AddressFamily | int, type: SocketKind | int, proto: int = ...) -> socket: ... +def fromfd(fd: _FD, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... if sys.platform != "win32": if sys.version_info >= (3, 9): - # flags and address appear to be unused in send_fds and recv_fds def send_fds( - sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: int = ..., address: None = ... + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None ) -> int: ... - def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = ...) -> tuple[bytes, list[int], int, Any]: ... + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... if sys.platform == "win32": def fromshare(info: bytes) -> socket: ... if sys.platform == "win32": - def socketpair(family: int = ..., type: int = ..., proto: int = ...) -> tuple[socket, socket]: ... + def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: ... else: def socketpair( - family: int | AddressFamily | None = ..., type: SocketType | int = ..., proto: int = ... 
+ family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 ) -> tuple[socket, socket]: ... class SocketIO(RawIOBase): @@ -761,34 +760,34 @@ class SocketIO(RawIOBase): @property def mode(self) -> Literal["rb", "wb", "rwb"]: ... -def getfqdn(name: str = ...) -> str: ... +def getfqdn(name: str = "") -> str: ... if sys.version_info >= (3, 11): def create_connection( address: tuple[str | None, int], timeout: float | None = ..., # noqa: F811 - source_address: _Address | None = ..., + source_address: _Address | None = None, *, - all_errors: bool = ..., + all_errors: bool = False, ) -> socket: ... else: def create_connection( - address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = ... # noqa: F811 + address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None # noqa: F811 ) -> socket: ... if sys.version_info >= (3, 8): def has_dualstack_ipv6() -> bool: ... def create_server( - address: _Address, *, family: int = ..., backlog: int | None = ..., reuse_port: bool = ..., dualstack_ipv6: bool = ... + address: _Address, + *, + family: int = ..., + backlog: int | None = None, + reuse_port: bool = False, + dualstack_ipv6: bool = False, ) -> socket: ... # the 5th tuple item is an address def getaddrinfo( - host: bytes | str | None, - port: bytes | str | int | None, - family: int = ..., - type: int = ..., - proto: int = ..., - flags: int = ..., + host: bytes | str | None, port: bytes | str | int | None, family: int = 0, type: int = 0, proto: int = 0, flags: int = 0 ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... diff --git a/mypy/typeshed/stdlib/socketserver.pyi b/mypy/typeshed/stdlib/socketserver.pyi index b5147d356ffe..b35f1553fb44 100644 --- a/mypy/typeshed/stdlib/socketserver.pyi +++ b/mypy/typeshed/stdlib/socketserver.pyi @@ -52,7 +52,7 @@ class BaseServer: def RequestHandlerClass(self: Self, val: Callable[[Any, _RetAddress, Self], BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... - def serve_forever(self, poll_interval: float = ...) -> None: ... + def serve_forever(self, poll_interval: float = 0.5) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... @@ -79,7 +79,7 @@ class TCPServer(BaseServer): self: Self, server_address: _AfInetAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], - bind_and_activate: bool = ..., + bind_and_activate: bool = True, ) -> None: ... def get_request(self) -> tuple[_socket, _RetAddress]: ... @@ -94,7 +94,7 @@ if sys.platform != "win32": self: Self, server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], - bind_and_activate: bool = ..., + bind_and_activate: bool = True, ) -> None: ... class UnixDatagramServer(BaseServer): @@ -103,7 +103,7 @@ if sys.platform != "win32": self: Self, server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], - bind_and_activate: bool = ..., + bind_and_activate: bool = True, ) -> None: ... if sys.platform != "win32": @@ -112,7 +112,7 @@ if sys.platform != "win32": active_children: set[int] | None # undocumented max_children: int # undocumented block_on_close: bool - def collect_children(self, *, blocking: bool = ...) -> None: ... 
# undocumented + def collect_children(self, *, blocking: bool = False) -> None: ... # undocumented def handle_timeout(self) -> None: ... # undocumented def service_actions(self) -> None: ... # undocumented def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index efda3b671ed5..01274d6e2a60 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -1,6 +1,6 @@ import sqlite3 import sys -from _typeshed import Incomplete, ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem +from _typeshed import Incomplete, ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem, Unused from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from datetime import date, datetime, time from types import TracebackType @@ -227,7 +227,7 @@ else: if sys.version_info < (3, 8): class Cache: - def __init__(self, *args: Incomplete, **kwargs: object) -> None: ... + def __init__(self, *args: Incomplete, **kwargs: Unused) -> None: ... def display(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... def get(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... @@ -294,7 +294,7 @@ class Connection: ) -> None: ... def close(self) -> None: ... if sys.version_info >= (3, 11): - def blobopen(self, __table: str, __column: str, __row: int, *, readonly: bool = ..., name: str = ...) -> Blob: ... + def blobopen(self, __table: str, __column: str, __row: int, *, readonly: bool = False, name: str = "main") -> Blob: ... def commit(self) -> None: ... def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: ... @@ -318,7 +318,7 @@ class Connection: def create_collation(self, __name: str, __callback: Callable[[str, str], int | SupportsIndex] | None) -> None: ... if sys.version_info >= (3, 8): def create_function( - self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = ... + self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False ) -> None: ... else: def create_function(self, name: str, num_params: int, func: Callable[..., _SqliteData] | None) -> None: ... @@ -346,16 +346,16 @@ class Connection: self, target: Connection, *, - pages: int = ..., - progress: Callable[[int, int, int], object] | None = ..., - name: str = ..., - sleep: float = ..., + pages: int = -1, + progress: Callable[[int, int, int], object] | None = None, + name: str = "main", + sleep: float = 0.25, ) -> None: ... if sys.version_info >= (3, 11): def setlimit(self, __category: int, __limit: int) -> int: ... def getlimit(self, __category: int) -> int: ... - def serialize(self, *, name: str = ...) -> bytes: ... - def deserialize(self, __data: ReadableBuffer, *, name: str = ...) -> None: ... + def serialize(self, *, name: str = "main") -> bytes: ... + def deserialize(self, __data: ReadableBuffer, *, name: str = "main") -> None: ... def __call__(self, __sql: str) -> _Statement: ... def __enter__(self: Self) -> Self: ... @@ -381,12 +381,12 @@ class Cursor(Iterator[Any]): def executemany(self: Self, __sql: str, __seq_of_parameters: Iterable[_Parameters]) -> Self: ... def executescript(self, __sql_script: str) -> Cursor: ... def fetchall(self) -> list[Any]: ... - def fetchmany(self, size: int | None = ...) -> list[Any]: ... + def fetchmany(self, size: int | None = 1) -> list[Any]: ... 
# Returns either a row (as created by the row_factory) or None, but # putting None in the return annotation causes annoying false positives. def fetchone(self) -> Any: ... - def setinputsizes(self, __sizes: object) -> None: ... # does nothing - def setoutputsize(self, __size: object, __column: object = ...) -> None: ... # does nothing + def setinputsizes(self, __sizes: Unused) -> None: ... # does nothing + def setoutputsize(self, __size: Unused, __column: Unused = None) -> None: ... # does nothing def __iter__(self: Self) -> Self: ... def __next__(self) -> Any: ... @@ -446,11 +446,11 @@ if sys.version_info >= (3, 11): @final class Blob: def close(self) -> None: ... - def read(self, __length: int = ...) -> bytes: ... + def read(self, __length: int = -1) -> bytes: ... def write(self, __data: ReadableBuffer) -> None: ... def tell(self) -> int: ... # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END - def seek(self, __offset: int, __origin: int = ...) -> None: ... + def seek(self, __offset: int, __origin: int = 0) -> None: ... def __len__(self) -> int: ... def __enter__(self: Self) -> Self: ... def __exit__(self, __typ: object, __val: object, __tb: object) -> Literal[False]: ... diff --git a/mypy/typeshed/stdlib/sre_compile.pyi b/mypy/typeshed/stdlib/sre_compile.pyi index a9f4d577d5d1..2d04a886c931 100644 --- a/mypy/typeshed/stdlib/sre_compile.pyi +++ b/mypy/typeshed/stdlib/sre_compile.pyi @@ -8,4 +8,4 @@ MAXCODE: int def dis(code: list[_NamedIntConstant]) -> None: ... def isstring(obj: Any) -> bool: ... -def compile(p: str | bytes | SubPattern, flags: int = ...) -> Pattern[Any]: ... +def compile(p: str | bytes | SubPattern, flags: int = 0) -> Pattern[Any]: ... diff --git a/mypy/typeshed/stdlib/sre_constants.pyi b/mypy/typeshed/stdlib/sre_constants.pyi index e7344fae3798..fe25eaf9728e 100644 --- a/mypy/typeshed/stdlib/sre_constants.pyi +++ b/mypy/typeshed/stdlib/sre_constants.pyi @@ -12,7 +12,7 @@ class error(Exception): pos: int | None lineno: int colno: int - def __init__(self, msg: str, pattern: str | bytes | None = ..., pos: int | None = ...) -> None: ... + def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ... class _NamedIntConstant(int): name: Any @@ -79,6 +79,10 @@ REPEAT: _NamedIntConstant REPEAT_ONE: _NamedIntConstant SUBPATTERN: _NamedIntConstant MIN_REPEAT_ONE: _NamedIntConstant +if sys.version_info >= (3, 11): + ATOMIC_GROUP: _NamedIntConstant + POSSESSIVE_REPEAT: _NamedIntConstant + POSSESSIVE_REPEAT_ONE: _NamedIntConstant RANGE_UNI_IGNORE: _NamedIntConstant GROUPREF_LOC_IGNORE: _NamedIntConstant GROUPREF_UNI_IGNORE: _NamedIntConstant diff --git a/mypy/typeshed/stdlib/sre_parse.pyi b/mypy/typeshed/stdlib/sre_parse.pyi index 3dcf8ad78dee..56f10bb41d57 100644 --- a/mypy/typeshed/stdlib/sre_parse.pyi +++ b/mypy/typeshed/stdlib/sre_parse.pyi @@ -52,12 +52,12 @@ class SubPattern: if sys.version_info >= (3, 8): state: State - def __init__(self, state: State, data: list[_CodeType] | None = ...) -> None: ... + def __init__(self, state: State, data: list[_CodeType] | None = None) -> None: ... else: pattern: Pattern - def __init__(self, pattern: Pattern, data: list[_CodeType] | None = ...) -> None: ... + def __init__(self, pattern: Pattern, data: list[_CodeType] | None = None) -> None: ... - def dump(self, level: int = ...) -> None: ... + def dump(self, level: int = 0) -> None: ... def __len__(self) -> int: ... def __delitem__(self, index: int | slice) -> None: ... 
def __getitem__(self, index: int | slice) -> SubPattern | _CodeType: ... @@ -85,7 +85,7 @@ class Tokenizer: def pos(self) -> int: ... def tell(self) -> int: ... def seek(self, index: int) -> None: ... - def error(self, msg: str, offset: int = ...) -> _Error: ... + def error(self, msg: str, offset: int = 0) -> _Error: ... if sys.version_info >= (3, 11): def checkgroupname(self, name: str, offset: int, nested: int) -> None: ... @@ -95,14 +95,14 @@ def fix_flags(src: str | bytes, flags: int) -> int: ... _TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]] _TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]] if sys.version_info >= (3, 8): - def parse(str: str, flags: int = ..., state: State | None = ...) -> SubPattern: ... + def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ... @overload def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ... @overload def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ... else: - def parse(str: str, flags: int = ..., pattern: Pattern | None = ...) -> SubPattern: ... + def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ... @overload def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... @overload diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 6d7df5e1c202..f8b97fb60eb7 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -46,22 +46,22 @@ CertificateError = SSLCertVerificationError def wrap_socket( sock: socket.socket, - keyfile: StrOrBytesPath | None = ..., - certfile: StrOrBytesPath | None = ..., - server_side: bool = ..., + keyfile: StrOrBytesPath | None = None, + certfile: StrOrBytesPath | None = None, + server_side: bool = False, cert_reqs: int = ..., ssl_version: int = ..., - ca_certs: str | None = ..., - do_handshake_on_connect: bool = ..., - suppress_ragged_eofs: bool = ..., - ciphers: str | None = ..., + ca_certs: str | None = None, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + ciphers: str | None = None, ) -> SSLSocket: ... def create_default_context( purpose: Purpose = ..., *, - cafile: StrOrBytesPath | None = ..., - capath: StrOrBytesPath | None = ..., - cadata: str | ReadableBuffer | None = ..., + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, ) -> SSLContext: ... if sys.version_info >= (3, 10): @@ -69,13 +69,13 @@ if sys.version_info >= (3, 10): protocol: int | None = None, *, cert_reqs: int = ..., - check_hostname: bool = ..., + check_hostname: bool = False, purpose: Purpose = ..., - certfile: StrOrBytesPath | None = ..., - keyfile: StrOrBytesPath | None = ..., - cafile: StrOrBytesPath | None = ..., - capath: StrOrBytesPath | None = ..., - cadata: str | ReadableBuffer | None = ..., + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, ) -> SSLContext: ... 
else: @@ -83,13 +83,13 @@ else: protocol: int = ..., *, cert_reqs: int = ..., - check_hostname: bool = ..., + check_hostname: bool = False, purpose: Purpose = ..., - certfile: StrOrBytesPath | None = ..., - keyfile: StrOrBytesPath | None = ..., - cafile: StrOrBytesPath | None = ..., - capath: StrOrBytesPath | None = ..., - cadata: str | ReadableBuffer | None = ..., + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, ) -> SSLContext: ... _create_default_https_context: Callable[..., SSLContext] @@ -107,11 +107,11 @@ def cert_time_to_seconds(cert_time: str) -> int: ... if sys.version_info >= (3, 10): def get_server_certificate( - addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ..., timeout: float = ... + addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ... ) -> str: ... else: - def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ...) -> str: ... + def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: ... def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... @@ -315,24 +315,24 @@ class SSLSocket(socket.socket): def __init__(self, *args: Any, **kwargs: Any) -> None: ... def connect(self, addr: socket._Address) -> None: ... def connect_ex(self, addr: socket._Address) -> int: ... - def recv(self, buflen: int = ..., flags: int = ...) -> bytes: ... - def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = ..., flags: int = ...) -> int: ... - def recvfrom(self, buflen: int = ..., flags: int = ...) -> tuple[bytes, socket._RetAddress]: ... + def recv(self, buflen: int = 1024, flags: int = 0) -> bytes: ... + def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> int: ... + def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ... def recvfrom_into( - self, buffer: WriteableBuffer, nbytes: int | None = ..., flags: int = ... + self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0 ) -> tuple[int, socket._RetAddress]: ... - def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... - def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... + def send(self, data: ReadableBuffer, flags: int = 0) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = 0) -> None: ... @overload - def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = ...) -> int: ... + def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = None) -> int: ... @overload def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... def shutdown(self, how: int) -> None: ... - def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... def write(self, data: ReadableBuffer) -> int: ... - def do_handshake(self, block: bool = ...) -> None: ... # block is undocumented + def do_handshake(self, block: bool = False) -> None: ... # block is undocumented @overload - def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... 
+ def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload @@ -340,7 +340,7 @@ class SSLSocket(socket.socket): def cipher(self) -> tuple[str, str, int] | None: ... def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... def compression(self) -> str | None: ... - def get_channel_binding(self, cb_type: str = ...) -> bytes | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... def selected_alpn_protocol(self) -> str | None: ... def selected_npn_protocol(self) -> str | None: ... def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... @@ -378,21 +378,32 @@ class SSLContext: if sys.version_info >= (3, 8): keylog_filename: str post_handshake_auth: bool - def __new__(cls: type[Self], protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... + if sys.version_info >= (3, 10): + security_level: int + if sys.version_info >= (3, 10): + # Using the default (None) for the `protocol` parameter is deprecated, + # but there isn't a good way of marking that in the stub unless/until PEP 702 is accepted + def __new__(cls: type[Self], protocol: int | None = None, *args: Any, **kwargs: Any) -> Self: ... + else: + def __new__(cls: type[Self], protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... + def cert_store_stats(self) -> dict[str, int]: ... def load_cert_chain( - self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = ..., password: _PasswordType | None = ... + self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None ) -> None: ... def load_default_certs(self, purpose: Purpose = ...) -> None: ... def load_verify_locations( - self, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., cadata: str | ReadableBuffer | None = ... + self, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, ) -> None: ... @overload - def get_ca_certs(self, binary_form: Literal[False] = ...) -> list[_PeerCertRetDictType]: ... + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload - def get_ca_certs(self, binary_form: bool = ...) -> Any: ... + def get_ca_certs(self, binary_form: bool = False) -> Any: ... def get_ciphers(self) -> list[_Cipher]: ... def set_default_verify_paths(self) -> None: ... def set_ciphers(self, __cipherlist: str) -> None: ... @@ -404,19 +415,19 @@ class SSLContext: def wrap_socket( self, sock: socket.socket, - server_side: bool = ..., - do_handshake_on_connect: bool = ..., - suppress_ragged_eofs: bool = ..., - server_hostname: str | None = ..., - session: SSLSession | None = ..., + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: str | None = None, + session: SSLSession | None = None, ) -> SSLSocket: ... def wrap_bio( self, incoming: MemoryBIO, outgoing: MemoryBIO, - server_side: bool = ..., - server_hostname: str | None = ..., - session: SSLSession | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + session: SSLSession | None = None, ) -> SSLObject: ... def session_stats(self) -> dict[str, int]: ... @@ -430,10 +441,10 @@ class SSLObject: @property def session_reused(self) -> bool: ... 
def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... def write(self, data: ReadableBuffer) -> int: ... @overload - def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload @@ -447,7 +458,7 @@ class SSLObject: def do_handshake(self) -> None: ... def unwrap(self) -> None: ... def version(self) -> str | None: ... - def get_channel_binding(self, cb_type: str = ...) -> bytes | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... if sys.version_info >= (3, 8): def verify_client_post_handshake(self) -> None: ... @@ -455,7 +466,7 @@ class SSLObject: class MemoryBIO: pending: int eof: bool - def read(self, __size: int = ...) -> bytes: ... + def read(self, __size: int = -1) -> bytes: ... def write(self, __buf: ReadableBuffer) -> int: ... def write_eof(self) -> None: ... diff --git a/mypy/typeshed/stdlib/statistics.pyi b/mypy/typeshed/stdlib/statistics.pyi index a01665ad8227..4ef950b9b4de 100644 --- a/mypy/typeshed/stdlib/statistics.pyi +++ b/mypy/typeshed/stdlib/statistics.pyi @@ -37,7 +37,7 @@ _HashableT = TypeVar("_HashableT", bound=Hashable) class StatisticsError(ValueError): ... if sys.version_info >= (3, 11): - def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = ...) -> float: ... + def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: ... elif sys.version_info >= (3, 8): def fmean(data: Iterable[SupportsFloat]) -> float: ... @@ -48,7 +48,7 @@ if sys.version_info >= (3, 8): def mean(data: Iterable[_NumberT]) -> _NumberT: ... if sys.version_info >= (3, 10): - def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = ...) -> _NumberT: ... + def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: ... else: def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: ... @@ -58,30 +58,30 @@ def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichCompariso def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... if sys.version_info >= (3, 11): - def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = ...) -> float: ... + def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: ... else: - def median_grouped(data: Iterable[_NumberT], interval: _NumberT = ...) -> _NumberT | float: ... + def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: ... def mode(data: Iterable[_HashableT]) -> _HashableT: ... if sys.version_info >= (3, 8): def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: ... -def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = ...) -> _NumberT: ... -def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = ...) -> _NumberT: ... +def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... +def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... if sys.version_info >= (3, 8): def quantiles( - data: Iterable[_NumberT], *, n: int = ..., method: Literal["inclusive", "exclusive"] = ... 
+ data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive" ) -> list[_NumberT]: ... -def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = ...) -> _NumberT: ... -def variance(data: Iterable[_NumberT], xbar: _NumberT | None = ...) -> _NumberT: ... +def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... +def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... if sys.version_info >= (3, 8): class NormalDist: - def __init__(self, mu: float = ..., sigma: float = ...) -> None: ... + def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: ... @property def mean(self) -> float: ... @property @@ -94,12 +94,12 @@ if sys.version_info >= (3, 8): def variance(self) -> float: ... @classmethod def from_samples(cls: type[Self], data: Iterable[SupportsFloat]) -> Self: ... - def samples(self, n: int, *, seed: Any | None = ...) -> list[float]: ... + def samples(self, n: int, *, seed: Any | None = None) -> list[float]: ... def pdf(self, x: float) -> float: ... def cdf(self, x: float) -> float: ... def inv_cdf(self, p: float) -> float: ... def overlap(self, other: NormalDist) -> float: ... - def quantiles(self, n: int = ...) -> list[float]: ... + def quantiles(self, n: int = 4) -> list[float]: ... if sys.version_info >= (3, 9): def zscore(self, x: float) -> float: ... @@ -124,7 +124,7 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 11): def linear_regression( - __regressor: Sequence[_Number], __dependent_variable: Sequence[_Number], *, proportional: bool = ... + __regressor: Sequence[_Number], __dependent_variable: Sequence[_Number], *, proportional: bool = False ) -> LinearRegression: ... elif sys.version_info >= (3, 10): diff --git a/mypy/typeshed/stdlib/string.pyi b/mypy/typeshed/stdlib/string.pyi index 49802ce81019..dc9a449e0e39 100644 --- a/mypy/typeshed/stdlib/string.pyi +++ b/mypy/typeshed/stdlib/string.pyi @@ -30,7 +30,7 @@ punctuation: LiteralString printable: LiteralString whitespace: LiteralString -def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = ...) -> StrOrLiteralStr: ... +def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: ... if sys.version_info >= (3, 9): _TemplateMetaclass: TypeAlias = type @@ -71,7 +71,7 @@ class Formatter: kwargs: Mapping[str, Any], used_args: set[int | str], recursion_depth: int, - auto_arg_index: int = ..., + auto_arg_index: int = 0, ) -> tuple[str, int]: ... def parse( self, format_string: StrOrLiteralStr diff --git a/mypy/typeshed/stdlib/struct.pyi b/mypy/typeshed/stdlib/struct.pyi index 02097384e0f7..4220cd825b76 100644 --- a/mypy/typeshed/stdlib/struct.pyi +++ b/mypy/typeshed/stdlib/struct.pyi @@ -9,7 +9,7 @@ class error(Exception): ... def pack(__fmt: str | bytes, *v: Any) -> bytes: ... def pack_into(__fmt: str | bytes, __buffer: WriteableBuffer, __offset: int, *v: Any) -> None: ... def unpack(__format: str | bytes, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... -def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... +def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... def iter_unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... def calcsize(__format: str | bytes) -> int: ... @@ -22,5 +22,5 @@ class Struct: def pack(self, *v: Any) -> bytes: ... def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... 
def unpack(self, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... - def unpack_from(self, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... + def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... def iter_unpack(self, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index c0b10a7781c3..35a7b7e34f6b 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -91,14 +91,7 @@ class CompletedProcess(Generic[_T]): # and writing all the overloads would be horrific. stdout: _T stderr: _T - # pyright ignore on __init__ because the TypeVar can technically be unsolved, but see comment above - def __init__( - self, - args: _CMD, - returncode: int, - stdout: _T | None = ..., # pyright: ignore[reportInvalidTypeVarUse] - stderr: _T | None = ..., - ) -> None: ... + def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ... def check_returncode(self) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -125,13 +118,13 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: Literal[True], - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -159,13 +152,13 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str, errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -193,13 +186,13 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str, - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -228,13 +221,13 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., # where the *real* keyword only args start - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -262,13 +255,13 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = 
False, encoding: None = ..., errors: None = ..., - input: ReadableBuffer | None = ..., + input: ReadableBuffer | None = None, text: Literal[None, False] = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -296,13 +289,13 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: _InputString | None = ..., + input: _InputString | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -333,13 +326,13 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: Literal[True], - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -366,13 +359,13 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str, errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -399,13 +392,13 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str, - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -433,13 +426,13 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., # where the *real* keyword only args start - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -466,13 +459,13 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: None = ..., errors: None = ..., - input: ReadableBuffer | None = ..., + input: ReadableBuffer | None = None, text: Literal[None, False] = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: 
Iterable[str | int] | None = ..., @@ -499,13 +492,13 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: _InputString | None = ..., + input: _InputString | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -535,13 +528,13 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: Literal[True], - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -567,13 +560,13 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str, errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -599,13 +592,13 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str, - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -632,13 +625,13 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., # where the *real* keyword only args start - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -664,13 +657,13 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: None = ..., errors: None = ..., - input: ReadableBuffer | None = ..., + input: ReadableBuffer | None = None, text: Literal[None, False] = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -696,13 +689,13 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str 
| None = ..., - input: _InputString | None = ..., + input: _InputString | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -730,13 +723,13 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: Literal[True], - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... @overload def run( @@ -758,13 +751,13 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str, errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... @overload def run( @@ -786,13 +779,13 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str, - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... @overload def run( @@ -815,13 +808,13 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., # where the *real* keyword only args start - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... @overload def run( @@ -843,13 +836,13 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: None = ..., errors: None = ..., - input: ReadableBuffer | None = ..., + input: ReadableBuffer | None = None, text: Literal[None, False] = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[bytes]: ... @overload def run( @@ -871,13 +864,13 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: _InputString | None = ..., + input: _InputString | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[Any]: ... 
# Same args as Popen.__init__ @@ -902,7 +895,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -933,7 +926,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -963,7 +956,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -991,7 +984,7 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., ) -> int: ... @@ -1131,7 +1124,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1162,7 +1155,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str, errors: str | None = ..., @@ -1193,7 +1186,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str, @@ -1225,7 +1218,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1256,7 +1249,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: None = ..., errors: None = ..., @@ -1287,7 +1280,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1321,7 +1314,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1351,7 +1344,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str, errors: str | None = ..., @@ -1381,7 +1374,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str, @@ -1412,7 +1405,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., 
pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1442,7 +1435,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: None = ..., errors: None = ..., @@ -1472,7 +1465,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1505,7 +1498,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1534,7 +1527,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str, errors: str | None = ..., @@ -1563,7 +1556,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str, @@ -1593,7 +1586,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1622,7 +1615,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: None = ..., errors: None = ..., @@ -1651,7 +1644,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1682,7 +1675,7 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1707,7 +1700,7 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str, errors: str | None = ..., @@ -1732,7 +1725,7 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str, @@ -1758,7 +1751,7 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1783,7 +1776,7 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, 
input: _InputString | None = ..., encoding: None = ..., errors: None = ..., @@ -1808,7 +1801,7 @@ else: start_new_session: bool = ..., pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., @@ -1822,7 +1815,9 @@ DEVNULL: int class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): - def __init__(self, cmd: _CMD, timeout: float, output: str | bytes | None = ..., stderr: str | bytes | None = ...) -> None: ... + def __init__( + self, cmd: _CMD, timeout: float, output: str | bytes | None = None, stderr: str | bytes | None = None + ) -> None: ... # morally: _CMD cmd: Any timeout: float @@ -1842,7 +1837,7 @@ class CalledProcessError(SubprocessError): stdout: Any stderr: Any def __init__( - self, returncode: int, cmd: _CMD, output: str | bytes | None = ..., stderr: str | bytes | None = ... + self, returncode: int, cmd: _CMD, output: str | bytes | None = None, stderr: str | bytes | None = None ) -> None: ... class Popen(Generic[AnyStr]): @@ -1860,188 +1855,188 @@ class Popen(Generic[AnyStr]): def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., + text: bool | None = None, + encoding: str | None = None, errors: str, - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, *, universal_newlines: Literal[True], - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., # where the *real* keyword only args start - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False, None] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... elif sys.version_info >= (3, 10): # pipesize is added in 3.10 @@ -2049,182 +2044,182 @@ class Popen(Generic[AnyStr]): def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., + text: bool | None = None, + encoding: str | None = None, errors: str, - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, *, universal_newlines: Literal[True], - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., # where the *real* keyword only args start - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False, None] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... elif sys.version_info >= (3, 9): # user, group, extra_groups, umask were added in 3.9 @@ -2232,336 +2227,336 @@ class Popen(Generic[AnyStr]): def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., + text: bool | None = None, + encoding: str | None = None, errors: str, - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, *, universal_newlines: Literal[True], - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., # where the *real* keyword only args start - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False, None] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... else: @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., + errors: str | None = None, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., + text: bool | None = None, + encoding: str | None = None, errors: str, ) -> None: ... @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, *, universal_newlines: Literal[True], - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., # where the *real* keyword only args start - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., + encoding: str | None = None, + errors: str | None = None, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False, None] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool | None = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... def poll(self) -> int | None: ... - def wait(self, timeout: float | None = ...) -> int: ... + def wait(self, timeout: float | None = None) -> int: ... # morally the members of the returned tuple should be optional # TODO this should allow ReadableBuffer for Popen[bytes], but adding # overloads for that runs into a mypy bug (python/mypy#14070). - def communicate(self, input: AnyStr | None = ..., timeout: float | None = ...) -> tuple[AnyStr, AnyStr]: ... + def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... def send_signal(self, sig: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... @@ -2574,8 +2569,8 @@ class Popen(Generic[AnyStr]): # The result really is always a str. if sys.version_info >= (3, 11): - def getstatusoutput(cmd: str | bytes, *, encoding: str | None = ..., errors: str | None = ...) -> tuple[int, str]: ... - def getoutput(cmd: str | bytes, *, encoding: str | None = ..., errors: str | None = ...) -> str: ... + def getstatusoutput(cmd: str | bytes, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes, *, encoding: str | None = None, errors: str | None = None) -> str: ... else: def getstatusoutput(cmd: str | bytes) -> tuple[int, str]: ... @@ -2592,12 +2587,12 @@ if sys.platform == "win32": def __init__( self, *, - dwFlags: int = ..., - hStdInput: Any | None = ..., - hStdOutput: Any | None = ..., - hStdError: Any | None = ..., - wShowWindow: int = ..., - lpAttributeList: Mapping[str, Any] | None = ..., + dwFlags: int = 0, + hStdInput: Any | None = None, + hStdOutput: Any | None = None, + hStdError: Any | None = None, + wShowWindow: int = 0, + lpAttributeList: Mapping[str, Any] | None = None, ) -> None: ... 
dwFlags: int hStdInput: Any | None diff --git a/mypy/typeshed/stdlib/sunau.pyi b/mypy/typeshed/stdlib/sunau.pyi index 5b21cb03d4a3..7702443b0c1c 100644 --- a/mypy/typeshed/stdlib/sunau.pyi +++ b/mypy/typeshed/stdlib/sunau.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self +from _typeshed import Self, Unused from typing import IO, Any, NamedTuple, NoReturn, overload from typing_extensions import Literal, TypeAlias @@ -33,7 +33,7 @@ class _sunau_params(NamedTuple): class Au_read: def __init__(self, f: _File) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def getfp(self) -> IO[bytes] | None: ... def rewind(self) -> None: ... def close(self) -> None: ... @@ -53,7 +53,7 @@ class Au_read: class Au_write: def __init__(self, f: _File) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... @@ -78,7 +78,7 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Au_read: ... @overload def open(f: _File, mode: Literal["w", "wb"]) -> Au_write: ... @overload -def open(f: _File, mode: str | None = ...) -> Any: ... +def open(f: _File, mode: str | None = None) -> Any: ... if sys.version_info < (3, 9): openfp = open diff --git a/mypy/typeshed/stdlib/symtable.pyi b/mypy/typeshed/stdlib/symtable.pyi index 98b62edbfc6a..304ae8bf8126 100644 --- a/mypy/typeshed/stdlib/symtable.pyi +++ b/mypy/typeshed/stdlib/symtable.pyi @@ -38,11 +38,11 @@ class Class(SymbolTable): class Symbol: if sys.version_info >= (3, 8): def __init__( - self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = ..., *, module_scope: bool = ... + self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None, *, module_scope: bool = False ) -> None: ... def is_nonlocal(self) -> bool: ... else: - def __init__(self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = ...) -> None: ... + def __init__(self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None) -> None: ... def get_name(self) -> str: ... def is_referenced(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi index c3747235d628..725f66794cf6 100644 --- a/mypy/typeshed/stdlib/sys.pyi +++ b/mypy/typeshed/stdlib/sys.pyi @@ -218,7 +218,7 @@ version_info: _version_info def call_tracing(__func: Callable[..., _T], __args: Any) -> _T: ... def _clear_type_cache() -> None: ... def _current_frames() -> dict[int, FrameType]: ... -def _getframe(__depth: int = ...) -> FrameType: ... +def _getframe(__depth: int = 0) -> FrameType: ... def _debugmallocstats() -> None: ... def __displayhook__(__value: object) -> None: ... def __excepthook__(__exctype: type[BaseException], __value: BaseException, __traceback: TracebackType | None) -> None: ... @@ -227,7 +227,7 @@ def exc_info() -> OptExcInfo: ... if sys.version_info >= (3, 11): def exception() -> BaseException | None: ... -def exit(__status: _ExitCode = ...) -> NoReturn: ... +def exit(__status: _ExitCode = None) -> NoReturn: ... def getallocatedblocks() -> int: ... def getdefaultencoding() -> str: ... 
@@ -304,7 +304,7 @@ if sys.version_info >= (3, 8): exc_value: BaseException | None exc_traceback: TracebackType | None err_msg: str | None - object: _object | None + object: _object unraisablehook: Callable[[UnraisableHookArgs], Any] def __unraisablehook__(__unraisable: UnraisableHookArgs) -> Any: ... def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... diff --git a/mypy/typeshed/stdlib/sysconfig.pyi b/mypy/typeshed/stdlib/sysconfig.pyi index 4b6257b5f62e..7e29cf1326d6 100644 --- a/mypy/typeshed/stdlib/sysconfig.pyi +++ b/mypy/typeshed/stdlib/sysconfig.pyi @@ -28,8 +28,8 @@ if sys.version_info >= (3, 10): def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... def get_path_names() -> tuple[str, ...]: ... -def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> str: ... -def get_paths(scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> dict[str, str]: ... +def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: ... +def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: ... def get_python_version() -> str: ... def get_platform() -> str: ... @@ -39,6 +39,6 @@ if sys.version_info >= (3, 11): else: def is_python_build(check_home: bool = False) -> bool: ... -def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = ...) -> dict[str, Any]: ... +def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index 5ad5af7f20bd..0aca7956a580 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -89,10 +89,10 @@ PAX_NAME_FIELDS: set[str] ENCODING: str def open( - name: StrOrBytesPath | None = ..., - mode: str = ..., - fileobj: IO[bytes] | None = ..., # depends on mode - bufsize: int = ..., + name: StrOrBytesPath | None = None, + mode: str = "r", + fileobj: IO[bytes] | None = None, # depends on mode + bufsize: int = 10240, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -127,19 +127,19 @@ class TarFile: offset: int # undocumented def __init__( self, - name: StrOrBytesPath | None = ..., - mode: Literal["r", "a", "w", "x"] = ..., - fileobj: _Fileobj | None = ..., - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., - copybufsize: int | None = ..., # undocumented + name: StrOrBytesPath | None = None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + format: int | None = None, + tarinfo: type[TarInfo] | None = None, + dereference: bool | None = None, + ignore_zeros: bool | None = None, + encoding: str | None = None, + errors: str = "surrogateescape", + pax_headers: Mapping[str, str] | None = None, + debug: int | None = None, + errorlevel: int | None = None, + copybufsize: int | None = None, # undocumented ) -> None: ... def __enter__(self: Self) -> Self: ... 
def __exit__( @@ -149,10 +149,10 @@ class TarFile: @classmethod def open( cls: type[Self], - name: StrOrBytesPath | None = ..., - mode: str = ..., - fileobj: IO[bytes] | None = ..., # depends on mode - bufsize: int = ..., + name: StrOrBytesPath | None = None, + mode: str = "r", + fileobj: IO[bytes] | None = None, # depends on mode + bufsize: int = 10240, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -168,8 +168,8 @@ class TarFile: def taropen( cls: type[Self], name: StrOrBytesPath | None, - mode: Literal["r", "a", "w", "x"] = ..., - fileobj: _Fileobj | None = ..., + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, *, compresslevel: int = ..., format: int | None = ..., @@ -186,9 +186,9 @@ class TarFile: def gzopen( cls: type[Self], name: StrOrBytesPath | None, - mode: Literal["r"] = ..., - fileobj: _GzipReadableFileobj | None = ..., - compresslevel: int = ..., + mode: Literal["r"] = "r", + fileobj: _GzipReadableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -205,8 +205,8 @@ class TarFile: cls: type[Self], name: StrOrBytesPath | None, mode: Literal["w", "x"], - fileobj: _GzipWritableFileobj | None = ..., - compresslevel: int = ..., + fileobj: _GzipWritableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -223,8 +223,8 @@ class TarFile: cls: type[Self], name: StrOrBytesPath | None, mode: Literal["w", "x"], - fileobj: _Bz2WritableFileobj | None = ..., - compresslevel: int = ..., + fileobj: _Bz2WritableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -240,9 +240,9 @@ class TarFile: def bz2open( cls: type[Self], name: StrOrBytesPath | None, - mode: Literal["r"] = ..., - fileobj: _Bz2ReadableFileobj | None = ..., - compresslevel: int = ..., + mode: Literal["r"] = "r", + fileobj: _Bz2ReadableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -257,9 +257,9 @@ class TarFile: def xzopen( cls: type[Self], name: StrOrBytesPath | None, - mode: Literal["r", "w", "x"] = ..., - fileobj: IO[bytes] | None = ..., - preset: int | None = ..., + mode: Literal["r", "w", "x"] = "r", + fileobj: IO[bytes] | None = None, + preset: int | None = None, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -273,16 +273,16 @@ class TarFile: def getmember(self, name: str) -> TarInfo: ... def getmembers(self) -> _list[TarInfo]: ... def getnames(self) -> _list[str]: ... - def list(self, verbose: bool = ..., *, members: _list[TarInfo] | None = ...) -> None: ... + def list(self, verbose: bool = True, *, members: _list[TarInfo] | None = None) -> None: ... def next(self) -> TarInfo | None: ... def extractall( - self, path: StrOrBytesPath = ..., members: Iterable[TarInfo] | None = ..., *, numeric_owner: bool = ... + self, path: StrOrBytesPath = ".", members: Iterable[TarInfo] | None = None, *, numeric_owner: bool = False ) -> None: ... def extract( - self, member: str | TarInfo, path: StrOrBytesPath = ..., set_attrs: bool = ..., *, numeric_owner: bool = ... + self, member: str | TarInfo, path: StrOrBytesPath = "", set_attrs: bool = True, *, numeric_owner: bool = False ) -> None: ... def _extract_member( - self, tarinfo: TarInfo, targetpath: str, set_attrs: bool = ..., numeric_owner: bool = ... 
+ self, tarinfo: TarInfo, targetpath: str, set_attrs: bool = True, numeric_owner: bool = False ) -> None: ... # undocumented def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: ... def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented @@ -297,14 +297,14 @@ class TarFile: def add( self, name: StrPath, - arcname: StrPath | None = ..., - recursive: bool = ..., + arcname: StrPath | None = None, + recursive: bool = True, *, - filter: Callable[[TarInfo], TarInfo | None] | None = ..., + filter: Callable[[TarInfo], TarInfo | None] | None = None, ) -> None: ... - def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = ...) -> None: ... + def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = None) -> None: ... def gettarinfo( - self, name: StrOrBytesPath | None = ..., arcname: str | None = ..., fileobj: IO[bytes] | None = ... + self, name: StrOrBytesPath | None = None, arcname: str | None = None, fileobj: IO[bytes] | None = None ) -> TarInfo: ... def close(self) -> None: ... @@ -344,7 +344,7 @@ class TarInfo: uname: str gname: str pax_headers: Mapping[str, str] - def __init__(self, name: str = ...) -> None: ... + def __init__(self, name: str = "") -> None: ... @classmethod def frombuf(cls: Type[Self], buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... @classmethod @@ -354,7 +354,11 @@ class TarInfo: @linkpath.setter def linkpath(self, linkname: str) -> None: ... def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: ... - def tobuf(self, format: int | None = ..., encoding: str | None = ..., errors: str = ...) -> bytes: ... + if sys.version_info >= (3, 8): + def tobuf(self, format: int | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + else: + def tobuf(self, format: int | None = 1, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + def create_ustar_header( self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str ) -> bytes: ... diff --git a/mypy/typeshed/stdlib/telnetlib.pyi b/mypy/typeshed/stdlib/telnetlib.pyi index 67ae5fcc8055..bcf9ef3693b2 100644 --- a/mypy/typeshed/stdlib/telnetlib.pyi +++ b/mypy/typeshed/stdlib/telnetlib.pyi @@ -88,15 +88,15 @@ NOOPT: bytes class Telnet: host: str | None # undocumented - def __init__(self, host: str | None = ..., port: int = ..., timeout: float = ...) -> None: ... - def open(self, host: str, port: int = ..., timeout: float = ...) -> None: ... + def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... def msg(self, msg: str, *args: Any) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def close(self) -> None: ... def get_socket(self) -> socket.socket: ... def fileno(self) -> int: ... def write(self, buffer: bytes) -> None: ... - def read_until(self, match: bytes, timeout: float | None = ...) -> bytes: ... + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: ... def read_all(self) -> bytes: ... def read_some(self) -> bytes: ... def read_very_eager(self) -> bytes: ... @@ -113,7 +113,7 @@ class Telnet: def mt_interact(self) -> None: ... def listener(self) -> None: ... def expect( - self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = ... + self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = None ) -> tuple[int, Match[bytes] | None, bytes]: ... 
def __enter__(self: Self) -> Self: ... def __exit__( diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi index 2c096f0fb4de..9dc23be2557f 100644 --- a/mypy/typeshed/stdlib/tempfile.pyi +++ b/mypy/typeshed/stdlib/tempfile.pyi @@ -37,76 +37,76 @@ if sys.version_info >= (3, 8): @overload def NamedTemporaryFile( mode: _StrMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, *, - errors: str | None = ..., + errors: str | None = None, ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, *, - errors: str | None = ..., + errors: str | None = None, ) -> _TemporaryFileWrapper[bytes]: ... @overload def NamedTemporaryFile( - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, *, - errors: str | None = ..., + errors: str | None = None, ) -> _TemporaryFileWrapper[Any]: ... else: @overload def NamedTemporaryFile( mode: _StrMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, ) -> _TemporaryFileWrapper[bytes]: ... 
@overload def NamedTemporaryFile( - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, ) -> _TemporaryFileWrapper[Any]: ... if sys.platform == "win32": @@ -116,38 +116,38 @@ else: @overload def TemporaryFile( mode: _StrMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> IO[str]: ... @overload def TemporaryFile( - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> IO[bytes]: ... @overload def TemporaryFile( - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> IO[Any]: ... else: @overload @@ -185,7 +185,7 @@ class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter name: str delete: bool - def __init__(self, file: IO[AnyStr], name: str, delete: bool = ...) -> None: ... + def __init__(self, file: IO[AnyStr], name: str, delete: bool = True) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... @@ -235,44 +235,72 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): @overload def __init__( self: SpooledTemporaryFile[bytes], - max_size: int = ..., - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int = 0, + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> None: ... 
@overload def __init__( self: SpooledTemporaryFile[str], - max_size: int = ..., - mode: _StrMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = 0, *, - errors: str | None = ..., + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, ) -> None: ... @overload def __init__( self, - max_size: int = ..., - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int = 0, *, - errors: str | None = ..., + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, ) -> None: ... @property def errors(self) -> str | None: ... @@ -280,38 +308,64 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): @overload def __init__( self: SpooledTemporaryFile[bytes], - max_size: int = ..., - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int = 0, + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, ) -> None: ... @overload def __init__( self: SpooledTemporaryFile[str], - max_size: int = ..., - mode: _StrMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = 0, + *, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, ) -> None: ... @overload def __init__( self, - max_size: int = ..., - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... 
+ @overload + def __init__( + self, + max_size: int = 0, + *, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, ) -> None: ... def rollover(self) -> None: ... @@ -337,7 +391,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def readlines(self, __hint: int = ...) -> list[AnyStr]: ... # type: ignore[override] def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... - def truncate(self, size: int | None = ...) -> None: ... # type: ignore[override] + def truncate(self, size: int | None = None) -> None: ... # type: ignore[override] def write(self, s: AnyStr) -> int: ... def writelines(self, iterable: Iterable[AnyStr]) -> None: ... # type: ignore[override] def __iter__(self) -> Iterator[AnyStr]: ... # type: ignore[override] @@ -355,27 +409,30 @@ class TemporaryDirectory(Generic[AnyStr]): @overload def __init__( self: TemporaryDirectory[str], - suffix: str | None = ..., - prefix: str | None = ..., - dir: StrPath | None = ..., - ignore_cleanup_errors: bool = ..., + suffix: str | None = None, + prefix: str | None = None, + dir: StrPath | None = None, + ignore_cleanup_errors: bool = False, ) -> None: ... @overload def __init__( self: TemporaryDirectory[bytes], - suffix: bytes | None = ..., - prefix: bytes | None = ..., - dir: BytesPath | None = ..., - ignore_cleanup_errors: bool = ..., + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ignore_cleanup_errors: bool = False, ) -> None: ... else: @overload def __init__( - self: TemporaryDirectory[str], suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ... + self: TemporaryDirectory[str], suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None ) -> None: ... @overload def __init__( - self: TemporaryDirectory[bytes], suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ... + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, ) -> None: ... def cleanup(self) -> None: ... @@ -387,19 +444,19 @@ class TemporaryDirectory(Generic[AnyStr]): # The overloads overlap, but they should still work fine. @overload def mkstemp( # type: ignore[misc] - suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ..., text: bool = ... + suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False ) -> tuple[int, str]: ... @overload def mkstemp( - suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ..., text: bool = ... + suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None, text: bool = False ) -> tuple[int, bytes]: ... # The overloads overlap, but they should still work fine. @overload -def mkdtemp(suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ...) -> str: ... # type: ignore[misc] +def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ... # type: ignore[misc] @overload -def mkdtemp(suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ...) -> bytes: ... -def mktemp(suffix: str = ..., prefix: str = ..., dir: StrPath | None = ...) -> str: ... +def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ... 
+def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ... def gettempdirb() -> bytes: ... def gettempprefixb() -> bytes: ... def gettempdir() -> str: ... diff --git a/mypy/typeshed/stdlib/textwrap.pyi b/mypy/typeshed/stdlib/textwrap.pyi index 9e423cb5ce94..e4a5b7899e8e 100644 --- a/mypy/typeshed/stdlib/textwrap.pyi +++ b/mypy/typeshed/stdlib/textwrap.pyi @@ -27,19 +27,19 @@ class TextWrapper: x: str # leaked loop variable def __init__( self, - width: int = ..., - initial_indent: str = ..., - subsequent_indent: str = ..., - expand_tabs: bool = ..., - replace_whitespace: bool = ..., - fix_sentence_endings: bool = ..., - break_long_words: bool = ..., - drop_whitespace: bool = ..., - break_on_hyphens: bool = ..., - tabsize: int = ..., + width: int = 70, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + drop_whitespace: bool = True, + break_on_hyphens: bool = True, + tabsize: int = 8, *, - max_lines: int | None = ..., - placeholder: str = ..., + max_lines: int | None = None, + placeholder: str = " [...]", ) -> None: ... # Private methods *are* part of the documented API for subclasses. def _munge_whitespace(self, text: str) -> str: ... @@ -53,7 +53,7 @@ class TextWrapper: def wrap( text: str, - width: int = ..., + width: int = 70, *, initial_indent: str = ..., subsequent_indent: str = ..., @@ -69,7 +69,7 @@ def wrap( ) -> list[str]: ... def fill( text: str, - width: int = ..., + width: int = 70, *, initial_indent: str = ..., subsequent_indent: str = ..., @@ -100,4 +100,4 @@ def shorten( placeholder: str = ..., ) -> str: ... def dedent(text: str) -> str: ... -def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = ...) -> str: ... +def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: ... diff --git a/mypy/typeshed/stdlib/threading.pyi b/mypy/typeshed/stdlib/threading.pyi index 6fb1ab99c833..c0b344fe757d 100644 --- a/mypy/typeshed/stdlib/threading.pyi +++ b/mypy/typeshed/stdlib/threading.pyi @@ -74,17 +74,17 @@ class Thread: daemon: bool def __init__( self, - group: None = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., - kwargs: Mapping[str, Any] | None = ..., + kwargs: Mapping[str, Any] | None = None, *, - daemon: bool | None = ..., + daemon: bool | None = None, ) -> None: ... def start(self) -> None: ... def run(self) -> None: ... - def join(self, timeout: float | None = ...) -> None: ... + def join(self, timeout: float | None = None) -> None: ... if sys.version_info >= (3, 8): @property def native_id(self) -> int | None: ... # only available on some platforms @@ -111,7 +111,7 @@ class Lock: def locked(self) -> bool: ... class _RLock: - def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... def release(self) -> None: ... __enter__ = acquire def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... @@ -119,27 +119,27 @@ class _RLock: RLock = _RLock class Condition: - def __init__(self, lock: Lock | _RLock | None = ...) -> None: ... + def __init__(self, lock: Lock | _RLock | None = None) -> None: ... def __enter__(self) -> bool: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... - def wait_for(self, predicate: Callable[[], _T], timeout: float | None = ...) -> _T: ... - def notify(self, n: int = ...) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... def notifyAll(self) -> None: ... # deprecated alias for notify_all() class Semaphore: _value: int - def __init__(self, value: int = ...) -> None: ... + def __init__(self, value: int = 1) -> None: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def acquire(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... - def __enter__(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... if sys.version_info >= (3, 9): - def release(self, n: int = ...) -> None: ... + def release(self, n: int = 1) -> None: ... else: def release(self) -> None: ... @@ -150,7 +150,7 @@ class Event: def isSet(self) -> bool: ... # deprecated alias for is_set() def set(self) -> None: ... def clear(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... if sys.version_info >= (3, 8): from _thread import _excepthook, _ExceptHookArgs @@ -169,8 +169,8 @@ class Timer(Thread): self, interval: float, function: Callable[..., object], - args: Iterable[Any] | None = ..., - kwargs: Mapping[str, Any] | None = ..., + args: Iterable[Any] | None = None, + kwargs: Mapping[str, Any] | None = None, ) -> None: ... def cancel(self) -> None: ... @@ -181,8 +181,8 @@ class Barrier: def n_waiting(self) -> int: ... @property def broken(self) -> bool: ... - def __init__(self, parties: int, action: Callable[[], None] | None = ..., timeout: float | None = ...) -> None: ... - def wait(self, timeout: float | None = ...) -> int: ... + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... + def wait(self, timeout: float | None = None) -> int: ... def reset(self) -> None: ... def abort(self) -> None: ... diff --git a/mypy/typeshed/stdlib/timeit.pyi b/mypy/typeshed/stdlib/timeit.pyi index dda6cefed0f6..a5da943c8484 100644 --- a/mypy/typeshed/stdlib/timeit.pyi +++ b/mypy/typeshed/stdlib/timeit.pyi @@ -11,22 +11,22 @@ default_timer: _Timer class Timer: def __init__( - self, stmt: _Stmt = ..., setup: _Stmt = ..., timer: _Timer = ..., globals: dict[str, Any] | None = ... + self, stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., globals: dict[str, Any] | None = None ) -> None: ... - def print_exc(self, file: IO[str] | None = ...) -> None: ... - def timeit(self, number: int = ...) -> float: ... - def repeat(self, repeat: int = ..., number: int = ...) -> list[float]: ... - def autorange(self, callback: Callable[[int, float], object] | None = ...) -> tuple[int, float]: ... + def print_exc(self, file: IO[str] | None = None) -> None: ... 
+ def timeit(self, number: int = 1000000) -> float: ... + def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: ... + def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: ... def timeit( - stmt: _Stmt = ..., setup: _Stmt = ..., timer: _Timer = ..., number: int = ..., globals: dict[str, Any] | None = ... + stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., number: int = 1000000, globals: dict[str, Any] | None = None ) -> float: ... def repeat( - stmt: _Stmt = ..., - setup: _Stmt = ..., + stmt: _Stmt = "pass", + setup: _Stmt = "pass", timer: _Timer = ..., - repeat: int = ..., - number: int = ..., - globals: dict[str, Any] | None = ..., + repeat: int = 5, + number: int = 1000000, + globals: dict[str, Any] | None = None, ) -> list[float]: ... -def main(args: Sequence[str] | None = ..., *, _wrap_timer: Callable[[_Timer], _Timer] | None = ...) -> None: ... +def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi index 699dfd2a408a..fdacf0097008 100644 --- a/mypy/typeshed/stdlib/tkinter/__init__.pyi +++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi @@ -269,7 +269,7 @@ def NoDefaultRoot() -> None: ... _TraceMode: TypeAlias = Literal["array", "read", "write", "unset"] class Variable: - def __init__(self, master: Misc | None = ..., value: Incomplete | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: Incomplete | None = None, name: str | None = None) -> None: ... def set(self, value) -> None: ... initialize = set def get(self): ... @@ -283,30 +283,30 @@ class Variable: def __eq__(self, other: object) -> bool: ... class StringVar(Variable): - def __init__(self, master: Misc | None = ..., value: str | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: ... def set(self, value: str) -> None: ... initialize = set def get(self) -> str: ... class IntVar(Variable): - def __init__(self, master: Misc | None = ..., value: int | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: ... def set(self, value: int) -> None: ... initialize = set def get(self) -> int: ... class DoubleVar(Variable): - def __init__(self, master: Misc | None = ..., value: float | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: ... def set(self, value: float) -> None: ... initialize = set def get(self) -> float: ... class BooleanVar(Variable): - def __init__(self, master: Misc | None = ..., value: bool | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: ... def set(self, value: bool) -> None: ... initialize = set def get(self) -> bool: ... -def mainloop(n: int = ...) -> None: ... +def mainloop(n: int = 0) -> None: ... getint: Incomplete getdouble: Incomplete @@ -325,15 +325,15 @@ class Misc: children: dict[str, Widget] def destroy(self) -> None: ... def deletecommand(self, name: str) -> None: ... - def tk_strictMotif(self, boolean: Incomplete | None = ...): ... + def tk_strictMotif(self, boolean: Incomplete | None = None): ... 
def tk_bisque(self) -> None: ... def tk_setPalette(self, *args, **kw) -> None: ... - def wait_variable(self, name: str | Variable = ...) -> None: ... + def wait_variable(self, name: str | Variable = "PY_VAR") -> None: ... waitvar = wait_variable - def wait_window(self, window: Misc | None = ...) -> None: ... - def wait_visibility(self, window: Misc | None = ...) -> None: ... - def setvar(self, name: str = ..., value: str = ...) -> None: ... - def getvar(self, name: str = ...): ... + def wait_window(self, window: Misc | None = None) -> None: ... + def wait_visibility(self, window: Misc | None = None) -> None: ... + def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: ... + def getvar(self, name: str = "PY_VAR"): ... def getint(self, s): ... def getdouble(self, s): ... def getboolean(self, s): ... @@ -347,13 +347,13 @@ class Misc: def tk_focusNext(self) -> Misc | None: ... def tk_focusPrev(self) -> Misc | None: ... @overload - def after(self, ms: int, func: None = ...) -> None: ... + def after(self, ms: int, func: None = None) -> None: ... @overload def after(self, ms: int | Literal["idle"], func: Callable[..., object], *args: Any) -> str: ... # after_idle is essentially partialmethod(after, "idle") def after_idle(self, func: Callable[..., object], *args: Any) -> str: ... def after_cancel(self, id: str) -> None: ... - def bell(self, displayof: Literal[0] | Misc | None = ...) -> None: ... + def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: ... def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: ... def clipboard_clear(self, *, displayof: Misc = ...) -> None: ... def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: ... @@ -363,42 +363,42 @@ class Misc: def grab_set_global(self) -> None: ... def grab_status(self) -> Literal["local", "global"] | None: ... def option_add( - self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = ... + self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = None ) -> None: ... def option_clear(self) -> None: ... def option_get(self, name, className): ... - def option_readfile(self, fileName, priority: Incomplete | None = ...) -> None: ... + def option_readfile(self, fileName, priority: Incomplete | None = None) -> None: ... def selection_clear(self, **kw) -> None: ... def selection_get(self, **kw): ... def selection_handle(self, command, **kw) -> None: ... def selection_own(self, **kw) -> None: ... def selection_own_get(self, **kw): ... def send(self, interp, cmd, *args): ... - def lower(self, belowThis: Incomplete | None = ...) -> None: ... - def tkraise(self, aboveThis: Incomplete | None = ...) -> None: ... + def lower(self, belowThis: Incomplete | None = None) -> None: ... + def tkraise(self, aboveThis: Incomplete | None = None) -> None: ... lift = tkraise if sys.version_info >= (3, 11): def info_patchlevel(self) -> _VersionInfoType: ... - def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = ...) -> int: ... - def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = ...) -> str: ... + def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: ... + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: ... def winfo_cells(self) -> int: ... def winfo_children(self) -> list[Widget]: ... 
# Widget because it can't be Toplevel or Tk def winfo_class(self) -> str: ... def winfo_colormapfull(self) -> bool: ... - def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = ...) -> Misc | None: ... + def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: ... def winfo_depth(self) -> int: ... def winfo_exists(self) -> bool: ... def winfo_fpixels(self, number: _ScreenUnits) -> float: ... def winfo_geometry(self) -> str: ... def winfo_height(self) -> int: ... def winfo_id(self) -> int: ... - def winfo_interps(self, displayof: Literal[0] | Misc | None = ...) -> tuple[str, ...]: ... + def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: ... def winfo_ismapped(self) -> bool: ... def winfo_manager(self) -> str: ... def winfo_name(self) -> str: ... def winfo_parent(self) -> str: ... # return value needs nametowidget() - def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = ...): ... + def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): ... def winfo_pixels(self, number: _ScreenUnits) -> int: ... def winfo_pointerx(self) -> int: ... def winfo_pointerxy(self) -> tuple[int, int]: ... @@ -421,7 +421,7 @@ class Misc: def winfo_viewable(self) -> bool: ... def winfo_visual(self) -> str: ... def winfo_visualid(self) -> str: ... - def winfo_visualsavailable(self, includeids: int = ...) -> list[tuple[str, int]]: ... + def winfo_visualsavailable(self, includeids: int = False) -> list[tuple[str, int]]: ... def winfo_vrootheight(self) -> int: ... def winfo_vrootwidth(self) -> int: ... def winfo_vrootx(self) -> int: ... @@ -432,7 +432,7 @@ class Misc: def update(self) -> None: ... def update_idletasks(self) -> None: ... @overload - def bindtags(self, tagList: None = ...) -> tuple[str, ...]: ... + def bindtags(self, tagList: None = None) -> tuple[str, ...]: ... @overload def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... # bind with isinstance(func, str) doesn't return anything, but all other @@ -440,49 +440,49 @@ class Misc: @overload def bind( self, - sequence: str | None = ..., - func: Callable[[Event[Misc]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload - def bind(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... # There's no way to know what type of widget bind_all and bind_class # callbacks will get, so those are Misc. @overload def bind_all( self, - sequence: str | None = ..., - func: Callable[[Event[Misc]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... 
@overload - def bind_all(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind_all(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload def bind_class( self, className: str, - sequence: str | None = ..., - func: Callable[[Event[Misc]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload - def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... - def unbind(self, sequence: str, funcid: str | None = ...) -> None: ... + def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def unbind(self, sequence: str, funcid: str | None = None) -> None: ... def unbind_all(self, sequence: str) -> None: ... def unbind_class(self, className: str, sequence: str) -> None: ... - def mainloop(self, n: int = ...) -> None: ... + def mainloop(self, n: int = 0) -> None: ... def quit(self) -> None: ... @property def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: ... def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: ... def register( - self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = ..., needcleanup: int = ... + self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = None, needcleanup: int = 1 ) -> str: ... def keys(self) -> list[str]: ... @overload @@ -490,14 +490,14 @@ class Misc: @overload def pack_propagate(self) -> None: ... propagate = pack_propagate - def grid_anchor(self, anchor: _Anchor | None = ...) -> None: ... + def grid_anchor(self, anchor: _Anchor | None = None) -> None: ... anchor = grid_anchor @overload def grid_bbox( - self, column: None = ..., row: None = ..., col2: None = ..., row2: None = ... + self, column: None = None, row: None = None, col2: None = None, row2: None = None ) -> tuple[int, int, int, int] | None: ... @overload - def grid_bbox(self, column: int, row: int, col2: None = ..., row2: None = ...) -> tuple[int, int, int, int] | None: ... + def grid_bbox(self, column: int, row: int, col2: None = None, row2: None = None) -> tuple[int, int, int, int] | None: ... @overload def grid_bbox(self, column: int, row: int, col2: int, row2: int) -> tuple[int, int, int, int] | None: ... bbox = grid_bbox @@ -532,7 +532,7 @@ class Misc: size = grid_size # Widget because Toplevel or Tk is never a slave def pack_slaves(self) -> list[Widget]: ... - def grid_slaves(self, row: int | None = ..., column: int | None = ...) -> list[Widget]: ... + def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: ... def place_slaves(self) -> list[Widget]: ... slaves = pack_slaves def event_add(self, virtual: str, *sequences: str) -> None: ... @@ -569,14 +569,14 @@ class Misc: x: _ScreenUnits = ..., y: _ScreenUnits = ..., ) -> None: ... - def event_info(self, virtual: str | None = ...) -> tuple[str, ...]: ... + def event_info(self, virtual: str | None = None) -> tuple[str, ...]: ... def image_names(self) -> tuple[str, ...]: ... def image_types(self) -> tuple[str, ...]: ... 
# See #4363 and #4891 def __setitem__(self, key: str, value: Any) -> None: ... def __getitem__(self, key: str) -> Any: ... def cget(self, key: str) -> Any: ... - def configure(self, cnf: Any = ...) -> Any: ... + def configure(self, cnf: Any = None) -> Any: ... # TODO: config is an alias of configure, but adding that here creates lots of mypy errors class CallWrapper: @@ -613,7 +613,7 @@ class Wm: def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: ... @overload def wm_aspect( - self, minNumer: None = ..., minDenom: None = ..., maxNumer: None = ..., maxDenom: None = ... + self, minNumer: None = None, minDenom: None = None, maxNumer: None = None, maxDenom: None = None ) -> tuple[int, int, int, int] | None: ... aspect = wm_aspect @overload @@ -623,7 +623,7 @@ class Wm: @overload def wm_attributes(self, __option: str, __value, *__other_option_value_pairs: Any) -> None: ... attributes = wm_attributes - def wm_client(self, name: str | None = ...) -> str: ... + def wm_client(self, name: str | None = None) -> str: ... client = wm_client @overload def wm_colormapwindows(self) -> list[Misc]: ... @@ -632,91 +632,91 @@ class Wm: @overload def wm_colormapwindows(self, __first_wlist_item: Misc, *other_wlist_items: Misc) -> None: ... colormapwindows = wm_colormapwindows - def wm_command(self, value: str | None = ...) -> str: ... + def wm_command(self, value: str | None = None) -> str: ... command = wm_command # Some of these always return empty string, but return type is set to None to prevent accidentally using it def wm_deiconify(self) -> None: ... deiconify = wm_deiconify - def wm_focusmodel(self, model: Literal["active", "passive"] | None = ...) -> Literal["active", "passive", ""]: ... + def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: ... focusmodel = wm_focusmodel def wm_forget(self, window: Wm) -> None: ... forget = wm_forget def wm_frame(self) -> str: ... frame = wm_frame @overload - def wm_geometry(self, newGeometry: None = ...) -> str: ... + def wm_geometry(self, newGeometry: None = None) -> str: ... @overload def wm_geometry(self, newGeometry: str) -> None: ... geometry = wm_geometry def wm_grid( self, - baseWidth: Incomplete | None = ..., - baseHeight: Incomplete | None = ..., - widthInc: Incomplete | None = ..., - heightInc: Incomplete | None = ..., + baseWidth: Incomplete | None = None, + baseHeight: Incomplete | None = None, + widthInc: Incomplete | None = None, + heightInc: Incomplete | None = None, ): ... grid = wm_grid - def wm_group(self, pathName: Incomplete | None = ...): ... + def wm_group(self, pathName: Incomplete | None = None): ... group = wm_group - def wm_iconbitmap(self, bitmap: Incomplete | None = ..., default: Incomplete | None = ...): ... + def wm_iconbitmap(self, bitmap: Incomplete | None = None, default: Incomplete | None = None): ... iconbitmap = wm_iconbitmap def wm_iconify(self) -> None: ... iconify = wm_iconify - def wm_iconmask(self, bitmap: Incomplete | None = ...): ... + def wm_iconmask(self, bitmap: Incomplete | None = None): ... iconmask = wm_iconmask - def wm_iconname(self, newName: Incomplete | None = ...) -> str: ... + def wm_iconname(self, newName: Incomplete | None = None) -> str: ... iconname = wm_iconname def wm_iconphoto(self, default: bool, __image1: Image, *args: Image) -> None: ... iconphoto = wm_iconphoto - def wm_iconposition(self, x: int | None = ..., y: int | None = ...) -> tuple[int, int] | None: ... 
+ def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: ... iconposition = wm_iconposition - def wm_iconwindow(self, pathName: Incomplete | None = ...): ... + def wm_iconwindow(self, pathName: Incomplete | None = None): ... iconwindow = wm_iconwindow def wm_manage(self, widget) -> None: ... manage = wm_manage @overload - def wm_maxsize(self, width: None = ..., height: None = ...) -> tuple[int, int]: ... + def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... @overload def wm_maxsize(self, width: int, height: int) -> None: ... maxsize = wm_maxsize @overload - def wm_minsize(self, width: None = ..., height: None = ...) -> tuple[int, int]: ... + def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... @overload def wm_minsize(self, width: int, height: int) -> None: ... minsize = wm_minsize @overload - def wm_overrideredirect(self, boolean: None = ...) -> bool | None: ... # returns True or None + def wm_overrideredirect(self, boolean: None = None) -> bool | None: ... # returns True or None @overload def wm_overrideredirect(self, boolean: bool) -> None: ... overrideredirect = wm_overrideredirect - def wm_positionfrom(self, who: Literal["program", "user"] | None = ...) -> Literal["", "program", "user"]: ... + def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... positionfrom = wm_positionfrom @overload def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: ... @overload - def wm_protocol(self, name: str, func: None = ...) -> str: ... + def wm_protocol(self, name: str, func: None = None) -> str: ... @overload - def wm_protocol(self, name: None = ..., func: None = ...) -> tuple[str, ...]: ... + def wm_protocol(self, name: None = None, func: None = None) -> tuple[str, ...]: ... protocol = wm_protocol @overload - def wm_resizable(self, width: None = ..., height: None = ...) -> tuple[bool, bool]: ... + def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: ... @overload def wm_resizable(self, width: bool, height: bool) -> None: ... resizable = wm_resizable - def wm_sizefrom(self, who: Literal["program", "user"] | None = ...) -> Literal["", "program", "user"]: ... + def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... sizefrom = wm_sizefrom @overload - def wm_state(self, newstate: None = ...) -> str: ... + def wm_state(self, newstate: None = None) -> str: ... @overload def wm_state(self, newstate: str) -> None: ... state = wm_state @overload - def wm_title(self, string: None = ...) -> str: ... + def wm_title(self, string: None = None) -> str: ... @overload def wm_title(self, string: str) -> None: ... title = wm_title @overload - def wm_transient(self, master: None = ...) -> _tkinter.Tcl_Obj: ... + def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: ... @overload def wm_transient(self, master: Wm | _tkinter.Tcl_Obj) -> None: ... transient = wm_transient @@ -733,17 +733,17 @@ class Tk(Misc, Wm): # args. # use `git grep screenName` to find them self, - screenName: str | None = ..., - baseName: str | None = ..., - className: str = ..., - useTk: bool = ..., - sync: bool = ..., - use: str | None = ..., + screenName: str | None = None, + baseName: str | None = None, + className: str = "Tk", + useTk: bool = True, + sync: bool = False, + use: str | None = None, ) -> None: ... 
@overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -800,7 +800,7 @@ class Tk(Misc, Wm): def wantobjects(self, *args, **kwargs): ... def willdispatch(self): ... -def Tcl(screenName: str | None = ..., baseName: str | None = ..., className: str = ..., useTk: bool = ...) -> Tk: ... +def Tcl(screenName: str | None = None, baseName: str | None = None, className: str = "Tk", useTk: bool = False) -> Tk: ... _InMiscTotal = TypedDict("_InMiscTotal", {"in": Misc}) _InMiscNonTotal = TypedDict("_InMiscNonTotal", {"in": Misc}, total=False) @@ -931,14 +931,14 @@ class Widget(BaseWidget, Pack, Place, Grid): @overload def bind( self: _W, - sequence: str | None = ..., - func: Callable[[Event[_W]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[_W]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload - def bind(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... class Toplevel(BaseWidget, Wm): # Toplevel and Tk have the same options because they correspond to the same @@ -946,7 +946,7 @@ class Toplevel(BaseWidget, Wm): # copy/pasted here instead of aliasing as 'config = Tk.config'. def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -976,7 +976,7 @@ class Toplevel(BaseWidget, Wm): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -1002,7 +1002,7 @@ class Toplevel(BaseWidget, Wm): class Button(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -1051,7 +1051,7 @@ class Button(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -1099,7 +1099,7 @@ class Button(Widget): class Canvas(Widget, XView, YView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -1142,7 +1142,7 @@ class Canvas(Widget, XView, YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -1187,8 +1187,8 @@ class Canvas(Widget, XView, YView): newtag: str, x: _ScreenUnits, y: _ScreenUnits, - halo: _ScreenUnits | None = ..., - start: str | _CanvasItemId | None = ..., + halo: _ScreenUnits | None = None, + start: str | _CanvasItemId | None = None, ) -> None: ... def addtag_enclosed(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... def addtag_overlapping(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... @@ -1198,7 +1198,7 @@ class Canvas(Widget, XView, YView): def find_all(self) -> tuple[_CanvasItemId, ...]: ... 
def find_below(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... def find_closest( - self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = ..., start: str | _CanvasItemId | None = ... + self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = None, start: str | _CanvasItemId | None = None ) -> tuple[_CanvasItemId, ...]: ... def find_enclosed( self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits @@ -1211,19 +1211,19 @@ class Canvas(Widget, XView, YView): def tag_bind( self, tagOrId: str | _CanvasItemId, - sequence: str | None = ..., - func: Callable[[Event[Canvas]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Canvas]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload def tag_bind( - self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ... + self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None ) -> None: ... @overload - def tag_bind(self, tagOrId: str | _CanvasItemId, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... - def tag_unbind(self, tagOrId: str | _CanvasItemId, sequence: str, funcid: str | None = ...) -> None: ... - def canvasx(self, screenx, gridspacing: Incomplete | None = ...): ... - def canvasy(self, screeny, gridspacing: Incomplete | None = ...): ... + def tag_bind(self, tagOrId: str | _CanvasItemId, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagOrId: str | _CanvasItemId, sequence: str, funcid: str | None = None) -> None: ... + def canvasx(self, screenx, gridspacing: Incomplete | None = None): ... + def canvasy(self, screeny, gridspacing: Incomplete | None = None): ... @overload def coords(self, __tagOrId: str | _CanvasItemId) -> list[float]: ... @overload @@ -1714,12 +1714,12 @@ class Canvas(Widget, XView, YView): def itemcget(self, tagOrId, option): ... # itemconfigure kwargs depend on item type, which is not known when type checking def itemconfigure( - self, tagOrId: str | _CanvasItemId, cnf: dict[str, Any] | None = ..., **kw: Any + self, tagOrId: str | _CanvasItemId, cnf: dict[str, Any] | None = None, **kw: Any ) -> dict[str, tuple[str, str, str, str, str]] | None: ... itemconfig = itemconfigure def move(self, *args) -> None: ... if sys.version_info >= (3, 8): - def moveto(self, tagOrId: str | _CanvasItemId, x: Literal[""] | float = ..., y: Literal[""] | float = ...) -> None: ... + def moveto(self, tagOrId: str | _CanvasItemId, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... def postscript(self, cnf=..., **kw): ... # tkinter does: @@ -1734,7 +1734,7 @@ class Canvas(Widget, XView, YView): def lift(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] def scale(self, *args) -> None: ... def scan_mark(self, x, y) -> None: ... - def scan_dragto(self, x, y, gain: int = ...) -> None: ... + def scan_dragto(self, x, y, gain: int = 10) -> None: ... def select_adjust(self, tagOrId, index) -> None: ... def select_clear(self) -> None: ... def select_from(self, tagOrId, index) -> None: ... 
@@ -1745,7 +1745,7 @@ class Canvas(Widget, XView, YView): class Checkbutton(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -1805,7 +1805,7 @@ class Checkbutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -1864,7 +1864,7 @@ _EntryIndex: TypeAlias = str | int # "INDICES" in manual page class Entry(Widget, XView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -1909,7 +1909,7 @@ class Entry(Widget, XView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -1952,7 +1952,7 @@ class Entry(Widget, XView): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def delete(self, first: _EntryIndex, last: _EntryIndex | None = ...) -> None: ... + def delete(self, first: _EntryIndex, last: _EntryIndex | None = None) -> None: ... def get(self) -> str: ... def icursor(self, index: _EntryIndex) -> None: ... def index(self, index: _EntryIndex) -> int: ... @@ -1975,7 +1975,7 @@ class Entry(Widget, XView): class Frame(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -2002,7 +2002,7 @@ class Frame(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -2027,7 +2027,7 @@ class Frame(Widget): class Label(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2066,7 +2066,7 @@ class Label(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -2107,7 +2107,7 @@ class Label(Widget): class Listbox(Widget, XView, YView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activestyle: Literal["dotbox", "none", "underline"] = ..., @@ -2158,7 +2158,7 @@ class Listbox(Widget, XView, YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activestyle: Literal["dotbox", "none", "underline"] = ..., background: _Color = ..., @@ -2196,8 +2196,8 @@ class Listbox(Widget, XView, YView): def activate(self, index: str | int) -> None: ... def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: ... # type: ignore[override] def curselection(self): ... - def delete(self, first: str | int, last: str | int | None = ...) -> None: ... - def get(self, first: str | int, last: str | int | None = ...): ... + def delete(self, first: str | int, last: str | int | None = None) -> None: ... + def get(self, first: str | int, last: str | int | None = None): ... def index(self, index: str | int) -> int: ... def insert(self, index: str | int, *elements: str | float) -> None: ... def nearest(self, y): ... @@ -2206,21 +2206,21 @@ class Listbox(Widget, XView, YView): def see(self, index: str | int) -> None: ... def selection_anchor(self, index: str | int) -> None: ... 
select_anchor = selection_anchor - def selection_clear(self, first: str | int, last: str | int | None = ...) -> None: ... # type: ignore[override] + def selection_clear(self, first: str | int, last: str | int | None = None) -> None: ... # type: ignore[override] select_clear = selection_clear def selection_includes(self, index: str | int): ... select_includes = selection_includes - def selection_set(self, first: str | int, last: str | int | None = ...) -> None: ... + def selection_set(self, first: str | int, last: str | int | None = None) -> None: ... select_set = selection_set def size(self) -> int: ... # type: ignore[override] def itemcget(self, index: str | int, option): ... - def itemconfigure(self, index: str | int, cnf: Incomplete | None = ..., **kw): ... + def itemconfigure(self, index: str | int, cnf: Incomplete | None = None, **kw): ... itemconfig = itemconfigure class Menu(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2252,7 +2252,7 @@ class Menu(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeborderwidth: _ScreenUnits = ..., @@ -2279,7 +2279,7 @@ class Menu(Widget): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def tk_popup(self, x: int, y: int, entry: str | int = ...) -> None: ... + def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: ... def activate(self, index: str | int) -> None: ... def add(self, itemType, cnf=..., **kw): ... # docstring says "Internal function." def insert(self, index, itemType, cnf=..., **kw): ... # docstring says "Internal function." @@ -2473,10 +2473,10 @@ class Menu(Widget): variable: Variable = ..., ) -> None: ... def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... - def delete(self, index1: str | int, index2: str | int | None = ...) -> None: ... + def delete(self, index1: str | int, index2: str | int | None = None) -> None: ... def entrycget(self, index: str | int, option: str) -> Any: ... def entryconfigure( - self, index: str | int, cnf: dict[str, Any] | None = ..., **kw: Any + self, index: str | int, cnf: dict[str, Any] | None = None, **kw: Any ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... entryconfig = entryconfigure def index(self, index: str | int) -> int | None: ... 
@@ -2490,7 +2490,7 @@ class Menu(Widget): class Menubutton(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2532,7 +2532,7 @@ class Menubutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -2576,7 +2576,7 @@ class Menubutton(Widget): class Message(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, anchor: _Anchor = ..., @@ -2607,7 +2607,7 @@ class Message(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, anchor: _Anchor = ..., aspect: int = ..., @@ -2639,7 +2639,7 @@ class Message(Widget): class Radiobutton(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2688,7 +2688,7 @@ class Radiobutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -2743,7 +2743,7 @@ class Radiobutton(Widget): class Scale(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2786,7 +2786,7 @@ class Scale(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., background: _Color = ..., @@ -2828,13 +2828,13 @@ class Scale(Widget): config = configure def get(self) -> float: ... def set(self, value) -> None: ... - def coords(self, value: float | None = ...) -> tuple[int, int]: ... + def coords(self, value: float | None = None) -> tuple[int, int]: ... def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: ... class Scrollbar(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2867,7 +2867,7 @@ class Scrollbar(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activerelief: _Relief = ..., @@ -2894,7 +2894,7 @@ class Scrollbar(Widget): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def activate(self, index: Incomplete | None = ...): ... + def activate(self, index: Incomplete | None = None): ... def delta(self, deltax: int, deltay: int) -> float: ... def fraction(self, x: int, y: int) -> float: ... def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ... 
@@ -2906,7 +2906,7 @@ _TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc class Text(Widget, XView, YView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, autoseparators: bool = ..., @@ -2963,7 +2963,7 @@ class Text(Widget, XView, YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, autoseparators: bool = ..., background: _Color = ..., @@ -3018,17 +3018,17 @@ class Text(Widget, XView, YView): def compare(self, index1: _TextIndex, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: _TextIndex) -> bool: ... def count(self, index1, index2, *args): ... # TODO @overload - def debug(self, boolean: None = ...) -> bool: ... + def debug(self, boolean: None = None) -> bool: ... @overload def debug(self, boolean: bool) -> None: ... - def delete(self, index1: _TextIndex, index2: _TextIndex | None = ...) -> None: ... + def delete(self, index1: _TextIndex, index2: _TextIndex | None = None) -> None: ... def dlineinfo(self, index: _TextIndex) -> tuple[int, int, int, int, int] | None: ... @overload def dump( self, index1: _TextIndex, - index2: _TextIndex | None = ..., - command: None = ..., + index2: _TextIndex | None = None, + command: None = None, *, all: bool = ..., image: bool = ..., @@ -3055,7 +3055,7 @@ class Text(Widget, XView, YView): def dump( self, index1: _TextIndex, - index2: _TextIndex | None = ..., + index2: _TextIndex | None = None, *, command: Callable[[str, str, str], object] | str, all: bool = ..., @@ -3067,23 +3067,23 @@ class Text(Widget, XView, YView): ) -> None: ... def edit(self, *args): ... # docstring says "Internal method" @overload - def edit_modified(self, arg: None = ...) -> bool: ... # actually returns Literal[0, 1] + def edit_modified(self, arg: None = None) -> bool: ... # actually returns Literal[0, 1] @overload def edit_modified(self, arg: bool) -> None: ... # actually returns empty string def edit_redo(self) -> None: ... # actually returns empty string def edit_reset(self) -> None: ... # actually returns empty string def edit_separator(self) -> None: ... # actually returns empty string def edit_undo(self) -> None: ... # actually returns empty string - def get(self, index1: _TextIndex, index2: _TextIndex | None = ...) -> str: ... + def get(self, index1: _TextIndex, index2: _TextIndex | None = None) -> str: ... # TODO: image_* methods def image_cget(self, index, option): ... - def image_configure(self, index, cnf: Incomplete | None = ..., **kw): ... + def image_configure(self, index, cnf: Incomplete | None = None, **kw): ... def image_create(self, index, cnf=..., **kw): ... def image_names(self): ... def index(self, index: _TextIndex) -> str: ... def insert(self, index: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... @overload - def mark_gravity(self, markName: str, direction: None = ...) -> Literal["left", "right"]: ... + def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: ... @overload def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string def mark_names(self) -> tuple[str, ...]: ... 
@@ -3101,14 +3101,14 @@ class Text(Widget, XView, YView): self, pattern: str, index: _TextIndex, - stopindex: _TextIndex | None = ..., - forwards: bool | None = ..., - backwards: bool | None = ..., - exact: bool | None = ..., - regexp: bool | None = ..., - nocase: bool | None = ..., - count: Variable | None = ..., - elide: bool | None = ..., + stopindex: _TextIndex | None = None, + forwards: bool | None = None, + backwards: bool | None = None, + exact: bool | None = None, + regexp: bool | None = None, + nocase: bool | None = None, + count: Variable | None = None, + elide: bool | None = None, ) -> str: ... # returns empty string for not found def see(self, index: _TextIndex) -> None: ... def tag_add(self, tagName: str, index1: _TextIndex, *args: _TextIndex) -> None: ... @@ -3119,18 +3119,18 @@ class Text(Widget, XView, YView): tagName: str, sequence: str | None, func: Callable[[Event[Text]], object] | None, - add: Literal["", "+"] | bool | None = ..., + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... - def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = ...) -> None: ... + def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: ... # allowing any string for cget instead of just Literals because there's no other way to look up tag options def tag_cget(self, tagName: str, option: str): ... @overload def tag_configure( self, tagName: str, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bgstipple: _Bitmap = ..., @@ -3165,24 +3165,28 @@ class Text(Widget, XView, YView): def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... tag_config = tag_configure def tag_delete(self, __first_tag_name: str, *tagNames: str) -> None: ... # error if no tag names given - def tag_lower(self, tagName: str, belowThis: str | None = ...) -> None: ... - def tag_names(self, index: _TextIndex | None = ...) -> tuple[str, ...]: ... - def tag_nextrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> tuple[str, str] | tuple[()]: ... - def tag_prevrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> tuple[str, str] | tuple[()]: ... - def tag_raise(self, tagName: str, aboveThis: str | None = ...) -> None: ... + def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: ... + def tag_names(self, index: _TextIndex | None = None) -> tuple[str, ...]: ... + def tag_nextrange( + self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None + ) -> tuple[str, str] | tuple[()]: ... + def tag_prevrange( + self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None + ) -> tuple[str, str] | tuple[()]: ... + def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: ... def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ... # tag_remove and tag_delete are different - def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> None: ... + def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None) -> None: ... # TODO: window_* methods def window_cget(self, index, option): ... - def window_configure(self, index, cnf: Incomplete | None = ..., **kw): ... 
+ def window_configure(self, index, cnf: Incomplete | None = None, **kw): ... window_config = window_configure def window_create(self, index, cnf=..., **kw) -> None: ... def window_names(self): ... def yview_pickplace(self, *what): ... # deprecated class _setit: - def __init__(self, var, value, callback: Incomplete | None = ...) -> None: ... + def __init__(self, var, value, callback: Incomplete | None = None) -> None: ... def __call__(self, *args) -> None: ... # manual page: tk_optionMenu @@ -3211,7 +3215,7 @@ class Image: name: Incomplete tk: _tkinter.TkappType def __init__( - self, imgtype, name: Incomplete | None = ..., cnf=..., master: Misc | _tkinter.TkappType | None = ..., **kw + self, imgtype, name: Incomplete | None = None, cnf=..., master: Misc | _tkinter.TkappType | None = None, **kw ) -> None: ... def __del__(self) -> None: ... def __setitem__(self, key, value) -> None: ... @@ -3225,9 +3229,9 @@ class Image: class PhotoImage(Image): def __init__( self, - name: str | None = ..., + name: str | None = None, cnf: dict[str, Any] = ..., - master: Misc | _tkinter.TkappType | None = ..., + master: Misc | _tkinter.TkappType | None = None, *, data: str | bytes = ..., # not same as data argument of put() format: str = ..., @@ -3253,8 +3257,8 @@ class PhotoImage(Image): def cget(self, option: str) -> str: ... def __getitem__(self, key: str) -> str: ... # always string: image['height'] can be '0' def copy(self) -> PhotoImage: ... - def zoom(self, x: int, y: int | Literal[""] = ...) -> PhotoImage: ... - def subsample(self, x: int, y: int | Literal[""] = ...) -> PhotoImage: ... + def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... def get(self, x: int, y: int) -> tuple[int, int, int]: ... def put( self, @@ -3267,9 +3271,9 @@ class PhotoImage(Image): | tuple[list[_Color], ...] | tuple[tuple[_Color, ...], ...] ), - to: tuple[int, int] | None = ..., + to: tuple[int, int] | None = None, ) -> None: ... - def write(self, filename: StrOrBytesPath, format: str | None = ..., from_coords: tuple[int, int] | None = ...) -> None: ... + def write(self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None) -> None: ... if sys.version_info >= (3, 8): def transparency_get(self, x: int, y: int) -> bool: ... def transparency_set(self, x: int, y: int, boolean: bool) -> None: ... @@ -3277,9 +3281,9 @@ class PhotoImage(Image): class BitmapImage(Image): def __init__( self, - name: Incomplete | None = ..., + name: Incomplete | None = None, cnf: dict[str, Any] = ..., - master: Misc | _tkinter.TkappType | None = ..., + master: Misc | _tkinter.TkappType | None = None, *, background: _Color = ..., data: str | bytes = ..., @@ -3295,7 +3299,7 @@ def image_types() -> tuple[str, ...]: ... class Spinbox(Widget, XView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -3354,7 +3358,7 @@ class Spinbox(Widget, XView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., background: _Color = ..., @@ -3411,7 +3415,7 @@ class Spinbox(Widget, XView): def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] - def delete(self, first, last: Incomplete | None = ...) -> Literal[""]: ... 
+ def delete(self, first, last: Incomplete | None = None) -> Literal[""]: ... def get(self) -> str: ... def icursor(self, index): ... def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: ... @@ -3425,7 +3429,7 @@ class Spinbox(Widget, XView): def selection(self, *args) -> tuple[int, ...]: ... def selection_adjust(self, index): ... def selection_clear(self): ... - def selection_element(self, element: Incomplete | None = ...): ... + def selection_element(self, element: Incomplete | None = None): ... if sys.version_info >= (3, 8): def selection_from(self, index: int) -> None: ... def selection_present(self) -> None: ... @@ -3435,7 +3439,7 @@ class Spinbox(Widget, XView): class LabelFrame(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -3469,7 +3473,7 @@ class LabelFrame(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -3500,7 +3504,7 @@ class LabelFrame(Widget): class PanedWindow(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -3529,7 +3533,7 @@ class PanedWindow(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -3569,7 +3573,7 @@ class PanedWindow(Widget): def sash_mark(self, index): ... def sash_place(self, index, x, y): ... def panecget(self, child, option): ... - def paneconfigure(self, tagOrId, cnf: Incomplete | None = ..., **kw): ... + def paneconfigure(self, tagOrId, cnf: Incomplete | None = None, **kw): ... paneconfig: Incomplete def panes(self): ... diff --git a/mypy/typeshed/stdlib/tkinter/colorchooser.pyi b/mypy/typeshed/stdlib/tkinter/colorchooser.pyi index 47eb222590c6..4300d94f58e8 100644 --- a/mypy/typeshed/stdlib/tkinter/colorchooser.pyi +++ b/mypy/typeshed/stdlib/tkinter/colorchooser.pyi @@ -11,10 +11,10 @@ class Chooser(Dialog): if sys.version_info >= (3, 9): def askcolor( - color: str | bytes | None = ..., *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... ) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... else: def askcolor( - color: str | bytes | None = ..., *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... ) -> tuple[None, None] | tuple[tuple[float, float, float], str]: ... diff --git a/mypy/typeshed/stdlib/tkinter/commondialog.pyi b/mypy/typeshed/stdlib/tkinter/commondialog.pyi index edae62582237..eba3ab5be3bd 100644 --- a/mypy/typeshed/stdlib/tkinter/commondialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/commondialog.pyi @@ -10,5 +10,5 @@ class Dialog: command: ClassVar[str | None] master: Incomplete | None options: Mapping[str, Incomplete] - def __init__(self, master: Incomplete | None = ..., **options: Incomplete) -> None: ... + def __init__(self, master: Incomplete | None = None, **options: Incomplete) -> None: ... def show(self, **options: Incomplete) -> Incomplete: ... 
diff --git a/mypy/typeshed/stdlib/tkinter/dialog.pyi b/mypy/typeshed/stdlib/tkinter/dialog.pyi index 032dac2c15a2..8825188c767e 100644 --- a/mypy/typeshed/stdlib/tkinter/dialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/dialog.pyi @@ -12,5 +12,5 @@ DIALOG_ICON: str class Dialog(Widget): widgetName: str num: int - def __init__(self, master: Incomplete | None = ..., cnf: Mapping[str, Any] = ..., **kw: Incomplete) -> None: ... + def __init__(self, master: Incomplete | None = None, cnf: Mapping[str, Any] = ..., **kw: Incomplete) -> None: ... def destroy(self) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/dnd.pyi b/mypy/typeshed/stdlib/tkinter/dnd.pyi index ad7972968f81..4a6ab42b3e33 100644 --- a/mypy/typeshed/stdlib/tkinter/dnd.pyi +++ b/mypy/typeshed/stdlib/tkinter/dnd.pyi @@ -11,8 +11,8 @@ class _DndSource(Protocol): class DndHandler: root: ClassVar[Tk | None] def __init__(self, source: _DndSource, event: Event[Misc]) -> None: ... - def cancel(self, event: Event[Misc] | None = ...) -> None: ... - def finish(self, event: Event[Misc] | None, commit: int = ...) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... + def finish(self, event: Event[Misc] | None, commit: int = 0) -> None: ... def on_motion(self, event: Event[Misc]) -> None: ... def on_release(self, event: Event[Misc]) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/filedialog.pyi b/mypy/typeshed/stdlib/tkinter/filedialog.pyi index d0b7e451f72c..10b36e4d3c06 100644 --- a/mypy/typeshed/stdlib/tkinter/filedialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/filedialog.pyi @@ -41,21 +41,21 @@ class FileDialog: filter_button: Button cancel_button: Button def __init__( - self, master, title: Incomplete | None = ... + self, master, title: Incomplete | None = None ) -> None: ... # title is usually a str or None, but e.g. int doesn't raise en exception either how: Incomplete | None - def go(self, dir_or_file=..., pattern: str = ..., default: str = ..., key: Incomplete | None = ...): ... - def quit(self, how: Incomplete | None = ...) -> None: ... + def go(self, dir_or_file=".", pattern: str = "*", default: str = "", key: Incomplete | None = None): ... + def quit(self, how: Incomplete | None = None) -> None: ... def dirs_double_event(self, event) -> None: ... def dirs_select_event(self, event) -> None: ... def files_double_event(self, event) -> None: ... def files_select_event(self, event) -> None: ... def ok_event(self, event) -> None: ... def ok_command(self) -> None: ... - def filter_command(self, event: Incomplete | None = ...) -> None: ... + def filter_command(self, event: Incomplete | None = None) -> None: ... def get_filter(self): ... def get_selection(self): ... - def cancel_command(self, event: Incomplete | None = ...) -> None: ... + def cancel_command(self, event: Incomplete | None = None) -> None: ... def set_filter(self, dir, pat) -> None: ... def set_selection(self, file) -> None: ... @@ -116,7 +116,7 @@ def askdirectory( # TODO: If someone actually uses these, overload to have the actual return type of open(..., mode) def asksaveasfile( - mode: str = ..., + mode: str = "w", *, confirmoverwrite: bool | None = ..., defaultextension: str | None = ..., @@ -128,7 +128,7 @@ def asksaveasfile( typevariable: StringVar | str | None = ..., ) -> IO[Incomplete] | None: ... 
def askopenfile( - mode: str = ..., + mode: str = "r", *, defaultextension: str | None = ..., filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., @@ -139,7 +139,7 @@ def askopenfile( typevariable: StringVar | str | None = ..., ) -> IO[Incomplete] | None: ... def askopenfiles( - mode: str = ..., + mode: str = "r", *, defaultextension: str | None = ..., filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., diff --git a/mypy/typeshed/stdlib/tkinter/font.pyi b/mypy/typeshed/stdlib/tkinter/font.pyi index dff84e9fac78..0a557e921914 100644 --- a/mypy/typeshed/stdlib/tkinter/font.pyi +++ b/mypy/typeshed/stdlib/tkinter/font.pyi @@ -41,10 +41,10 @@ class Font: self, # In tkinter, 'root' refers to tkinter.Tk by convention, but the code # actually works with any tkinter widget so we use tkinter.Misc. - root: tkinter.Misc | None = ..., - font: _FontDescription | None = ..., - name: str | None = ..., - exists: bool = ..., + root: tkinter.Misc | None = None, + font: _FontDescription | None = None, + name: str | None = None, + exists: bool = False, *, family: str = ..., size: int = ..., @@ -68,19 +68,19 @@ class Font: def cget(self, option: str) -> Any: ... __getitem__ = cget @overload - def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = ...) -> str: ... + def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: ... @overload - def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = ...) -> int: ... + def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = None) -> int: ... @overload - def actual(self, option: Literal["weight"], displayof: tkinter.Misc | None = ...) -> Literal["normal", "bold"]: ... + def actual(self, option: Literal["weight"], displayof: tkinter.Misc | None = None) -> Literal["normal", "bold"]: ... @overload - def actual(self, option: Literal["slant"], displayof: tkinter.Misc | None = ...) -> Literal["roman", "italic"]: ... + def actual(self, option: Literal["slant"], displayof: tkinter.Misc | None = None) -> Literal["roman", "italic"]: ... @overload - def actual(self, option: Literal["underline", "overstrike"], displayof: tkinter.Misc | None = ...) -> bool: ... + def actual(self, option: Literal["underline", "overstrike"], displayof: tkinter.Misc | None = None) -> bool: ... @overload - def actual(self, option: None, displayof: tkinter.Misc | None = ...) -> _FontDict: ... + def actual(self, option: None, displayof: tkinter.Misc | None = None) -> _FontDict: ... @overload - def actual(self, *, displayof: tkinter.Misc | None = ...) -> _FontDict: ... + def actual(self, *, displayof: tkinter.Misc | None = None) -> _FontDict: ... def config( self, *, @@ -99,14 +99,14 @@ class Font: def metrics(self, __option: Literal["fixed"], *, displayof: tkinter.Misc | None = ...) -> bool: ... @overload def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... - def measure(self, text: str, displayof: tkinter.Misc | None = ...) -> int: ... + def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: ... def __eq__(self, other: object) -> bool: ... -def families(root: tkinter.Misc | None = ..., displayof: tkinter.Misc | None = ...) -> tuple[str, ...]: ... -def names(root: tkinter.Misc | None = ...) -> tuple[str, ...]: ... +def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: ... +def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: ... 
if sys.version_info >= (3, 10): - def nametofont(name: str, root: tkinter.Misc | None = ...) -> Font: ... + def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: ... else: def nametofont(name: str) -> Font: ... diff --git a/mypy/typeshed/stdlib/tkinter/messagebox.pyi b/mypy/typeshed/stdlib/tkinter/messagebox.pyi index d99c588e3cd3..5a04b66d7866 100644 --- a/mypy/typeshed/stdlib/tkinter/messagebox.pyi +++ b/mypy/typeshed/stdlib/tkinter/messagebox.pyi @@ -34,11 +34,11 @@ NO: str class Message(Dialog): command: ClassVar[str] -def showinfo(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def showwarning(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def showerror(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def askquestion(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def askokcancel(title: str | None = ..., message: str | None = ..., **options) -> bool: ... -def askyesno(title: str | None = ..., message: str | None = ..., **options) -> bool: ... -def askyesnocancel(title: str | None = ..., message: str | None = ..., **options) -> bool | None: ... -def askretrycancel(title: str | None = ..., message: str | None = ..., **options) -> bool: ... +def showinfo(title: str | None = None, message: str | None = None, **options) -> str: ... +def showwarning(title: str | None = None, message: str | None = None, **options) -> str: ... +def showerror(title: str | None = None, message: str | None = None, **options) -> str: ... +def askquestion(title: str | None = None, message: str | None = None, **options) -> str: ... +def askokcancel(title: str | None = None, message: str | None = None, **options) -> bool: ... +def askyesno(title: str | None = None, message: str | None = None, **options) -> bool: ... +def askyesnocancel(title: str | None = None, message: str | None = None, **options) -> bool | None: ... +def askretrycancel(title: str | None = None, message: str | None = None, **options) -> bool: ... diff --git a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi index 4d8a7004c6b9..114f8c3de3ea 100644 --- a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi +++ b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -7,4 +7,4 @@ __all__ = ["ScrolledText"] class ScrolledText(Text): frame: Frame vbar: Scrollbar - def __init__(self, master: Misc | None = ..., **kwargs: Incomplete) -> None: ... + def __init__(self, master: Misc | None = None, **kwargs: Incomplete) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/simpledialog.pyi b/mypy/typeshed/stdlib/tkinter/simpledialog.pyi index 8ae8b6d286d0..2c57cce7371c 100644 --- a/mypy/typeshed/stdlib/tkinter/simpledialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/simpledialog.pyi @@ -1,11 +1,11 @@ from tkinter import Event, Frame, Misc, Toplevel class Dialog(Toplevel): - def __init__(self, parent: Misc | None, title: str | None = ...) -> None: ... + def __init__(self, parent: Misc | None, title: str | None = None) -> None: ... def body(self, master: Frame) -> Misc | None: ... def buttonbox(self) -> None: ... - def ok(self, event: Event[Misc] | None = ...) -> None: ... - def cancel(self, event: Event[Misc] | None = ...) -> None: ... + def ok(self, event: Event[Misc] | None = None) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... def validate(self) -> bool: ... def apply(self) -> None: ... 
@@ -13,12 +13,12 @@ class SimpleDialog: def __init__( self, master: Misc | None, - text: str = ..., + text: str = "", buttons: list[str] = ..., - default: int | None = ..., - cancel: int | None = ..., - title: str | None = ..., - class_: str | None = ..., + default: int | None = None, + cancel: int | None = None, + title: str | None = None, + class_: str | None = None, ) -> None: ... def go(self) -> int | None: ... def return_event(self, event: Event[Misc]) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/tix.pyi b/mypy/typeshed/stdlib/tkinter/tix.pyi index db568bc4abef..5dd6f040fab7 100644 --- a/mypy/typeshed/stdlib/tkinter/tix.pyi +++ b/mypy/typeshed/stdlib/tkinter/tix.pyi @@ -38,22 +38,22 @@ TCL_ALL_EVENTS: Literal[0] class tixCommand: def tix_addbitmapdir(self, directory: str) -> None: ... def tix_cget(self, option: str) -> Any: ... - def tix_configure(self, cnf: dict[str, Any] | None = ..., **kw: Any) -> Any: ... - def tix_filedialog(self, dlgclass: str | None = ...) -> str: ... + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: ... + def tix_filedialog(self, dlgclass: str | None = None) -> str: ... def tix_getbitmap(self, name: str) -> str: ... def tix_getimage(self, name: str) -> str: ... def tix_option_get(self, name: str) -> Any: ... - def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = ...) -> None: ... + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: ... class Tk(tkinter.Tk, tixCommand): - def __init__(self, screenName: str | None = ..., baseName: str | None = ..., className: str = ...) -> None: ... + def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... class TixWidget(tkinter.Widget): def __init__( self, - master: tkinter.Misc | None = ..., - widgetName: str | None = ..., - static_options: list[str] | None = ..., + master: tkinter.Misc | None = None, + widgetName: str | None = None, + static_options: list[str] | None = None, cnf: dict[str, Any] = ..., kw: dict[str, Any] = ..., ) -> None: ... @@ -62,52 +62,50 @@ class TixWidget(tkinter.Widget): def subwidget(self, name: str) -> tkinter.Widget: ... def subwidgets_all(self) -> list[tkinter.Widget]: ... def config_all(self, option: Any, value: Any) -> None: ... - def image_create(self, imgtype: str, cnf: dict[str, Any] = ..., master: tkinter.Widget | None = ..., **kw) -> None: ... + def image_create(self, imgtype: str, cnf: dict[str, Any] = ..., master: tkinter.Widget | None = None, **kw) -> None: ... def image_delete(self, imgname: str) -> None: ... class TixSubWidget(TixWidget): - def __init__( - self, master: tkinter.Widget, name: str, destroy_physically: int = ..., check_intermediate: int = ... - ) -> None: ... + def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... class DisplayStyle: - def __init__(self, itemtype: str, cnf: dict[str, Any] = ..., *, master: tkinter.Widget | None = ..., **kw) -> None: ... + def __init__(self, itemtype: str, cnf: dict[str, Any] = ..., *, master: tkinter.Widget | None = None, **kw) -> None: ... def __getitem__(self, key: str): ... def __setitem__(self, key: str, value: Any) -> None: ... def delete(self) -> None: ... def config(self, cnf: dict[str, Any] = ..., **kw): ... class Balloon(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... 
+ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = ..., **kw) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... class ButtonBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class ComboBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add_history(self, str: str) -> None: ... def append_history(self, str: str) -> None: ... def insert(self, index: int, str: str) -> None: ... def pick(self, index: int) -> None: ... class Control(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def decrement(self) -> None: ... def increment(self) -> None: ... def invoke(self) -> None: ... class LabelEntry(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class LabelFrame(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class Meter(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class OptionMenu(TixWidget): def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... @@ -129,7 +127,7 @@ class Select(TixWidget): def invoke(self, name: str) -> None: ... class StdButtonBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def invoke(self, name: str) -> None: ... class DirList(TixWidget): @@ -164,13 +162,13 @@ class FileEntry(TixWidget): def file_dialog(self) -> None: ... class HList(TixWidget, tkinter.XView, tkinter.YView): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... - def add_child(self, parent: str | None = ..., cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def add_child(self, parent: str | None = None, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... def anchor_set(self, entry: str) -> None: ... def anchor_clear(self) -> None: ... # FIXME: Overload, certain combos return, others don't - def column_width(self, col: int = ..., width: int | None = ..., chars: int | None = ...) -> int | None: ... 
+ def column_width(self, col: int = 0, width: int | None = None, chars: int | None = None) -> int | None: ... def delete_all(self) -> None: ... def delete_entry(self, entry: str) -> None: ... def delete_offsprings(self, entry: str) -> None: ... @@ -195,7 +193,7 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def indicator_size(self, entry: str) -> int: ... def info_anchor(self) -> str: ... def info_bbox(self, entry: str) -> tuple[int, int, int, int]: ... - def info_children(self, entry: str | None = ...) -> tuple[str, ...]: ... + def info_children(self, entry: str | None = None) -> tuple[str, ...]: ... def info_data(self, entry: str) -> Any: ... def info_dragsite(self) -> str: ... def info_dropsite(self) -> str: ... @@ -216,34 +214,34 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def see(self, entry: str) -> None: ... def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... def selection_includes(self, entry: str) -> bool: ... - def selection_set(self, first: str, last: str | None = ...) -> None: ... + def selection_set(self, first: str, last: str | None = None) -> None: ... def show_entry(self, entry: str) -> None: ... class CheckList(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def autosetmode(self) -> None: ... def close(self, entrypath: str) -> None: ... def getmode(self, entrypath: str) -> str: ... def open(self, entrypath: str) -> None: ... - def getselection(self, mode: str = ...) -> tuple[str, ...]: ... + def getselection(self, mode: str = "on") -> tuple[str, ...]: ... def getstatus(self, entrypath: str) -> str: ... - def setstatus(self, entrypath: str, mode: str = ...) -> None: ... + def setstatus(self, entrypath: str, mode: str = "on") -> None: ... class Tree(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def autosetmode(self) -> None: ... def close(self, entrypath: str) -> None: ... def getmode(self, entrypath: str) -> str: ... def open(self, entrypath: str) -> None: ... - def setmode(self, entrypath: str, mode: str = ...) -> None: ... + def setmode(self, entrypath: str, mode: str = "none") -> None: ... class TList(TixWidget, tkinter.XView, tkinter.YView): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def active_set(self, index: int) -> None: ... def active_clear(self) -> None: ... def anchor_set(self, index: int) -> None: ... def anchor_clear(self) -> None: ... - def delete(self, from_: int, to: int | None = ...) -> None: ... + def delete(self, from_: int, to: int | None = None) -> None: ... def dragsite_set(self, index: int) -> None: ... def dragsite_clear(self) -> None: ... def dropsite_set(self, index: int) -> None: ... @@ -261,7 +259,7 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def see(self, index: int) -> None: ... def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... def selection_includes(self, index: int) -> bool: ... - def selection_set(self, first: int, last: int | None = ...) -> None: ... + def selection_set(self, first: int, last: int | None = None) -> None: ... 
class PanedWindow(TixWidget): def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... @@ -280,7 +278,7 @@ class ListNoteBook(TixWidget): def raise_page(self, name: str) -> None: ... class NoteBook(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... def delete(self, name: str) -> None: ... def page(self, name: str) -> tkinter.Widget: ... @@ -289,7 +287,7 @@ class NoteBook(TixWidget): def raised(self) -> bool: ... class InputOnly(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class Form: def __setitem__(self, key: str, value: Any) -> None: ... @@ -297,6 +295,6 @@ class Form: def form(self, cnf: dict[str, Any] = ..., **kw) -> None: ... def check(self) -> bool: ... def forget(self) -> None: ... - def grid(self, xsize: int = ..., ysize: int = ...) -> tuple[int, int] | None: ... - def info(self, option: str | None = ...): ... + def grid(self, xsize: int = 0, ysize: int = 0) -> tuple[int, int] | None: ... + def info(self, option: str | None = None): ... def slaves(self) -> list[tkinter.Widget]: ... diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi index 07584ed9ed87..bd477535f41f 100644 --- a/mypy/typeshed/stdlib/tkinter/ttk.pyi +++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -36,7 +36,7 @@ __all__ = [ ] def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... -def setup_master(master: Incomplete | None = ...): ... +def setup_master(master: Incomplete | None = None): ... _Padding: TypeAlias = Union[ tkinter._ScreenUnits, @@ -52,32 +52,32 @@ _TtkCompound: TypeAlias = Literal["text", "image", tkinter._Compound] class Style: master: Incomplete tk: _tkinter.TkappType - def __init__(self, master: tkinter.Misc | None = ...) -> None: ... - def configure(self, style, query_opt: Incomplete | None = ..., **kw): ... - def map(self, style, query_opt: Incomplete | None = ..., **kw): ... - def lookup(self, style, option, state: Incomplete | None = ..., default: Incomplete | None = ...): ... - def layout(self, style, layoutspec: Incomplete | None = ...): ... + def __init__(self, master: tkinter.Misc | None = None) -> None: ... + def configure(self, style, query_opt: Incomplete | None = None, **kw): ... + def map(self, style, query_opt: Incomplete | None = None, **kw): ... + def lookup(self, style, option, state: Incomplete | None = None, default: Incomplete | None = None): ... + def layout(self, style, layoutspec: Incomplete | None = None): ... def element_create(self, elementname, etype, *args, **kw) -> None: ... def element_names(self): ... def element_options(self, elementname): ... - def theme_create(self, themename, parent: Incomplete | None = ..., settings: Incomplete | None = ...) -> None: ... + def theme_create(self, themename, parent: Incomplete | None = None, settings: Incomplete | None = None) -> None: ... def theme_settings(self, themename, settings) -> None: ... def theme_names(self) -> tuple[str, ...]: ... @overload def theme_use(self, themename: str) -> None: ... @overload - def theme_use(self, themename: None = ...) -> str: ... + def theme_use(self, themename: None = None) -> str: ... 
class Widget(tkinter.Widget): - def __init__(self, master: tkinter.Misc | None, widgetname, kw: Incomplete | None = ...) -> None: ... + def __init__(self, master: tkinter.Misc | None, widgetname, kw: Incomplete | None = None) -> None: ... def identify(self, x: int, y: int) -> str: ... - def instate(self, statespec, callback: Incomplete | None = ..., *args, **kw): ... - def state(self, statespec: Incomplete | None = ...): ... + def instate(self, statespec, callback: Incomplete | None = None, *args, **kw): ... + def state(self, statespec: Incomplete | None = None): ... class Button(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: tkinter._ButtonCommand = ..., @@ -98,7 +98,7 @@ class Button(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: tkinter._ButtonCommand = ..., compound: _TtkCompound = ..., @@ -122,7 +122,7 @@ class Button(Widget): class Checkbutton(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: tkinter._ButtonCommand = ..., @@ -148,7 +148,7 @@ class Checkbutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: tkinter._ButtonCommand = ..., compound: _TtkCompound = ..., @@ -174,8 +174,8 @@ class Checkbutton(Widget): class Entry(Widget, tkinter.Entry): def __init__( self, - master: tkinter.Misc | None = ..., - widget: str | None = ..., + master: tkinter.Misc | None = None, + widget: str | None = None, *, background: tkinter._Color = ..., # undocumented class_: str = ..., @@ -199,7 +199,7 @@ class Entry(Widget, tkinter.Entry): @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -224,7 +224,7 @@ class Entry(Widget, tkinter.Entry): @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -252,7 +252,7 @@ class Entry(Widget, tkinter.Entry): class Combobox(Entry): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, background: tkinter._Color = ..., # undocumented class_: str = ..., @@ -279,7 +279,7 @@ class Combobox(Entry): @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -307,7 +307,7 @@ class Combobox(Entry): @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -331,13 +331,13 @@ class Combobox(Entry): ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def current(self, newindex: int | None = ...) -> int: ... + def current(self, newindex: int | None = None) -> int: ... def set(self, value: Any) -> None: ... 
class Frame(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., @@ -354,7 +354,7 @@ class Frame(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., @@ -373,7 +373,7 @@ class Frame(Widget): class Label(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, anchor: tkinter._Anchor = ..., background: tkinter._Color = ..., @@ -401,7 +401,7 @@ class Label(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, anchor: tkinter._Anchor = ..., background: tkinter._Color = ..., @@ -431,7 +431,7 @@ class Label(Widget): class Labelframe(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., # undocumented @@ -452,7 +452,7 @@ class Labelframe(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., @@ -477,7 +477,7 @@ LabelFrame = Labelframe class Menubutton(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., compound: _TtkCompound = ..., @@ -498,7 +498,7 @@ class Menubutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, compound: _TtkCompound = ..., cursor: tkinter._Cursor = ..., @@ -521,7 +521,7 @@ class Menubutton(Widget): class Notebook(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -535,7 +535,7 @@ class Notebook(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., height: int = ..., @@ -564,15 +564,15 @@ class Notebook(Widget): def identify(self, x: int, y: int) -> str: ... def index(self, tab_id): ... def insert(self, pos, child, **kw) -> None: ... - def select(self, tab_id: Incomplete | None = ...): ... - def tab(self, tab_id, option: Incomplete | None = ..., **kw): ... + def select(self, tab_id: Incomplete | None = None): ... + def tab(self, tab_id, option: Incomplete | None = None, **kw): ... def tabs(self): ... def enable_traversal(self) -> None: ... class Panedwindow(Widget, tkinter.PanedWindow): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -588,7 +588,7 @@ class Panedwindow(Widget, tkinter.PanedWindow): @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., height: int = ..., @@ -602,7 +602,7 @@ class Panedwindow(Widget, tkinter.PanedWindow): @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., height: int = ..., @@ -614,15 +614,15 @@ class Panedwindow(Widget, tkinter.PanedWindow): def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
forget: Incomplete def insert(self, pos, child, **kw) -> None: ... - def pane(self, pane, option: Incomplete | None = ..., **kw): ... - def sashpos(self, index, newpos: Incomplete | None = ...): ... + def pane(self, pane, option: Incomplete | None = None, **kw): ... + def sashpos(self, index, newpos: Incomplete | None = None): ... PanedWindow = Panedwindow class Progressbar(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -640,7 +640,7 @@ class Progressbar(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., length: tkinter._ScreenUnits = ..., @@ -656,14 +656,14 @@ class Progressbar(Widget): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def start(self, interval: Literal["idle"] | int | None = ...) -> None: ... - def step(self, amount: float | None = ...) -> None: ... + def start(self, interval: Literal["idle"] | int | None = None) -> None: ... + def step(self, amount: float | None = None) -> None: ... def stop(self) -> None: ... class Radiobutton(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: tkinter._ButtonCommand = ..., @@ -685,7 +685,7 @@ class Radiobutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: tkinter._ButtonCommand = ..., compound: _TtkCompound = ..., @@ -711,7 +711,7 @@ class Radiobutton(Widget): class Scale(Widget, tkinter.Scale): # type: ignore[misc] def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: str | Callable[[str], object] = ..., @@ -730,7 +730,7 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: str | Callable[[str], object] = ..., cursor: tkinter._Cursor = ..., @@ -750,7 +750,7 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: str | Callable[[str], object] = ..., cursor: tkinter._Cursor = ..., @@ -766,13 +766,13 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def get(self, x: int | None = ..., y: int | None = ...) -> float: ... + def get(self, x: int | None = None, y: int | None = None) -> float: ... 
# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: Callable[..., tuple[float, float] | None] | str = ..., @@ -785,7 +785,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: Callable[..., tuple[float, float] | None] | str = ..., cursor: tkinter._Cursor = ..., @@ -799,7 +799,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: Callable[..., tuple[float, float] | None] | str = ..., cursor: tkinter._Cursor = ..., @@ -813,7 +813,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] class Separator(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -825,7 +825,7 @@ class Separator(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., orient: Literal["horizontal", "vertical"] = ..., @@ -839,7 +839,7 @@ class Separator(Widget): class Sizegrip(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -850,7 +850,7 @@ class Sizegrip(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., style: str = ..., @@ -863,7 +863,7 @@ class Sizegrip(Widget): class Spinbox(Entry): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, background: tkinter._Color = ..., # undocumented class_: str = ..., @@ -894,7 +894,7 @@ class Spinbox(Entry): @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., @@ -958,7 +958,7 @@ _TreeviewColumnId: TypeAlias = int | str # manual page: "COLUMN IDENTIFIERS" class Treeview(Widget, tkinter.XView, tkinter.YView): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., columns: str | list[str] | tuple[str, ...] = ..., @@ -981,7 +981,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, columns: str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., @@ -998,8 +998,8 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, item, column: _TreeviewColumnId | None = ...) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] - def get_children(self, item: str | None = ...) -> tuple[str, ...]: ... + def bbox(self, item, column: _TreeviewColumnId | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] + def get_children(self, item: str | None = None) -> tuple[str, ...]: ... 
def set_children(self, item: str, *newchildren: str) -> None: ... @overload def column(self, column: _TreeviewColumnId, option: Literal["width", "minwidth"]) -> int: ... @@ -1015,7 +1015,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def column( self, column: _TreeviewColumnId, - option: None = ..., + option: None = None, *, width: int = ..., minwidth: int = ..., @@ -1027,7 +1027,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def detach(self, *items: str) -> None: ... def exists(self, item: str) -> bool: ... @overload # type: ignore[override] - def focus(self, item: None = ...) -> str: ... # can return empty string + def focus(self, item: None = None) -> str: ... # can return empty string @overload def focus(self, item: str) -> Literal[""]: ... @overload @@ -1041,12 +1041,12 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def heading(self, column: _TreeviewColumnId, option: str) -> Any: ... @overload - def heading(self, column: _TreeviewColumnId, option: None = ...) -> _TreeviewHeaderDict: ... # type: ignore[misc] + def heading(self, column: _TreeviewColumnId, option: None = None) -> _TreeviewHeaderDict: ... # type: ignore[misc] @overload def heading( self, column: _TreeviewColumnId, - option: None = ..., + option: None = None, *, text: str = ..., image: tkinter._ImageSpec = ..., @@ -1063,7 +1063,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): self, parent: str, index: int | Literal["end"], - iid: str | None = ..., + iid: str | None = None, *, id: str = ..., # same as iid text: str = ..., @@ -1085,12 +1085,12 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def item(self, item: str, option: str) -> Any: ... @overload - def item(self, item: str, option: None = ...) -> _TreeviewItemDict: ... # type: ignore[misc] + def item(self, item: str, option: None = None) -> _TreeviewItemDict: ... # type: ignore[misc] @overload def item( self, item: str, - option: None = ..., + option: None = None, *, text: str = ..., image: tkinter._ImageSpec = ..., @@ -1107,23 +1107,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): if sys.version_info >= (3, 8): def selection(self) -> tuple[str, ...]: ... else: - def selection(self, selop: Incomplete | None = ..., items: Incomplete | None = ...) -> tuple[str, ...]: ... + def selection(self, selop: Incomplete | None = ..., items: Incomplete | None = None) -> tuple[str, ...]: ... def selection_set(self, items: str | list[str] | tuple[str, ...]) -> None: ... def selection_add(self, items: str | list[str] | tuple[str, ...]) -> None: ... def selection_remove(self, items: str | list[str] | tuple[str, ...]) -> None: ... def selection_toggle(self, items: str | list[str] | tuple[str, ...]) -> None: ... @overload - def set(self, item: str, column: None = ..., value: None = ...) -> dict[str, Any]: ... + def set(self, item: str, column: None = None, value: None = None) -> dict[str, Any]: ... @overload - def set(self, item: str, column: _TreeviewColumnId, value: None = ...) -> Any: ... + def set(self, item: str, column: _TreeviewColumnId, value: None = None) -> Any: ... @overload def set(self, item: str, column: _TreeviewColumnId, value: Any) -> Literal[""]: ... # There's no tag_unbind() or 'add' argument for whatever reason. # Also, it's 'callback' instead of 'func' here. @overload def tag_bind( - self, tagname: str, sequence: str | None = ..., callback: Callable[[tkinter.Event[Treeview]], object] | None = ... 
+ self, tagname: str, sequence: str | None = None, callback: Callable[[tkinter.Event[Treeview]], object] | None = None ) -> str: ... @overload def tag_bind(self, tagname: str, sequence: str | None, callback: str) -> None: ... @@ -1139,7 +1139,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def tag_configure( self, tagname: str, - option: None = ..., + option: None = None, *, # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug foreground: tkinter._Color = ..., @@ -1148,7 +1148,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): image: tkinter._ImageSpec = ..., ) -> _TreeviewTagDict | Any: ... # can be None but annoying to check @overload - def tag_has(self, tagname: str, item: None = ...) -> tuple[str, ...]: ... + def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ... @overload def tag_has(self, tagname: str, item: str) -> bool: ... @@ -1158,10 +1158,10 @@ class LabeledScale(Frame): # TODO: don't any-type **kw. That goes to Frame.__init__. def __init__( self, - master: tkinter.Misc | None = ..., - variable: tkinter.IntVar | tkinter.DoubleVar | None = ..., - from_: float = ..., - to: float = ..., + master: tkinter.Misc | None = None, + variable: tkinter.IntVar | tkinter.DoubleVar | None = None, + from_: float = 0, + to: float = 10, *, compound: Literal["top", "bottom"] = ..., **kw, @@ -1174,7 +1174,7 @@ class OptionMenu(Menubutton): self, master, variable, - default: str | None = ..., + default: str | None = None, *values: str, # rest of these are keyword-only because *args syntax used above style: str = ..., @@ -1183,4 +1183,4 @@ class OptionMenu(Menubutton): ) -> None: ... # configure, config, cget, destroy are inherited from Menubutton # destroy and __setitem__ are overridden, signature does not change - def set_menu(self, default: Incomplete | None = ..., *values) -> None: ... + def set_menu(self, default: Incomplete | None = None, *values) -> None: ... diff --git a/mypy/typeshed/stdlib/trace.pyi b/mypy/typeshed/stdlib/trace.pyi index 1f0de1d4d964..f79b38f1ce82 100644 --- a/mypy/typeshed/stdlib/trace.pyi +++ b/mypy/typeshed/stdlib/trace.pyi @@ -14,35 +14,35 @@ _FileModuleFunction: TypeAlias = tuple[str, str | None, str] class CoverageResults: def __init__( self, - counts: dict[tuple[str, int], int] | None = ..., - calledfuncs: dict[_FileModuleFunction, int] | None = ..., - infile: StrPath | None = ..., - callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = ..., - outfile: StrPath | None = ..., + counts: dict[tuple[str, int], int] | None = None, + calledfuncs: dict[_FileModuleFunction, int] | None = None, + infile: StrPath | None = None, + callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = None, + outfile: StrPath | None = None, ) -> None: ... # undocumented def update(self, other: CoverageResults) -> None: ... - def write_results(self, show_missing: bool = ..., summary: bool = ..., coverdir: StrPath | None = ...) -> None: ... + def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ... def write_results_file( - self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = ... + self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None ) -> tuple[int, int]: ... def is_ignored_filename(self, filename: str) -> bool: ... 
# undocumented class Trace: def __init__( self, - count: int = ..., - trace: int = ..., - countfuncs: int = ..., - countcallers: int = ..., + count: int = 1, + trace: int = 1, + countfuncs: int = 0, + countcallers: int = 0, ignoremods: Sequence[str] = ..., ignoredirs: Sequence[str] = ..., - infile: StrPath | None = ..., - outfile: StrPath | None = ..., - timing: bool = ..., + infile: StrPath | None = None, + outfile: StrPath | None = None, + timing: bool = False, ) -> None: ... def run(self, cmd: str | types.CodeType) -> None: ... def runctx( - self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = ..., locals: Mapping[str, Any] | None = ... + self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None ) -> None: ... if sys.version_info >= (3, 9): def runfunc(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... diff --git a/mypy/typeshed/stdlib/traceback.pyi b/mypy/typeshed/stdlib/traceback.pyi index bf8e24e7ab27..cdda50c0a1b3 100644 --- a/mypy/typeshed/stdlib/traceback.pyi +++ b/mypy/typeshed/stdlib/traceback.pyi @@ -29,7 +29,7 @@ __all__ = [ _PT: TypeAlias = tuple[str, int, str, str | None] -def print_tb(tb: TracebackType | None, limit: int | None = ..., file: SupportsWrite[str] | None = ...) -> None: ... +def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... if sys.version_info >= (3, 10): @overload @@ -37,51 +37,51 @@ if sys.version_info >= (3, 10): __exc: type[BaseException] | None, value: BaseException | None = ..., tb: TracebackType | None = ..., - limit: int | None = ..., - file: SupportsWrite[str] | None = ..., - chain: bool = ..., + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, ) -> None: ... @overload def print_exception( - __exc: BaseException, *, limit: int | None = ..., file: SupportsWrite[str] | None = ..., chain: bool = ... + __exc: BaseException, *, limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True ) -> None: ... @overload def format_exception( __exc: type[BaseException] | None, value: BaseException | None = ..., tb: TracebackType | None = ..., - limit: int | None = ..., - chain: bool = ..., + limit: int | None = None, + chain: bool = True, ) -> list[str]: ... @overload - def format_exception(__exc: BaseException, *, limit: int | None = ..., chain: bool = ...) -> list[str]: ... + def format_exception(__exc: BaseException, *, limit: int | None = None, chain: bool = True) -> list[str]: ... else: def print_exception( etype: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, - limit: int | None = ..., - file: SupportsWrite[str] | None = ..., - chain: bool = ..., + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, ) -> None: ... def format_exception( etype: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, - limit: int | None = ..., - chain: bool = ..., + limit: int | None = None, + chain: bool = True, ) -> list[str]: ... -def print_exc(limit: int | None = ..., file: SupportsWrite[str] | None = ..., chain: bool = ...) -> None: ... -def print_last(limit: int | None = ..., file: SupportsWrite[str] | None = ..., chain: bool = ...) -> None: ... -def print_stack(f: FrameType | None = ..., limit: int | None = ..., file: SupportsWrite[str] | None = ...) -> None: ... -def extract_tb(tb: TracebackType | None, limit: int | None = ...) 
-> StackSummary: ... -def extract_stack(f: FrameType | None = ..., limit: int | None = ...) -> StackSummary: ... +def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... +def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: ... def format_list(extracted_list: list[FrameSummary]) -> list[str]: ... # undocumented -def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = ...) -> None: ... +def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = None) -> None: ... if sys.version_info >= (3, 10): def format_exception_only(__exc: type[BaseException] | None, value: BaseException | None = ...) -> list[str]: ... @@ -89,9 +89,9 @@ if sys.version_info >= (3, 10): else: def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... -def format_exc(limit: int | None = ..., chain: bool = ...) -> str: ... -def format_tb(tb: TracebackType | None, limit: int | None = ...) -> list[str]: ... -def format_stack(f: FrameType | None = ..., limit: int | None = ...) -> list[str]: ... +def format_exc(limit: int | None = None, chain: bool = True) -> str: ... +def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: ... +def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: ... def clear_frames(tb: TracebackType | None) -> None: ... def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... @@ -99,7 +99,7 @@ def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... if sys.version_info >= (3, 11): class _ExceptionPrintContext: def indent(self) -> str: ... - def emit(self, text_gen: str | Iterable[str], margin_char: str | None = ...) -> Generator[str, None, None]: ... + def emit(self, text_gen: str | Iterable[str], margin_char: str | None = None) -> Generator[str, None, None]: ... class TracebackException: __cause__: TracebackException @@ -119,13 +119,13 @@ class TracebackException: exc_value: BaseException, exc_traceback: TracebackType | None, *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., - compact: bool = ..., - max_group_width: int = ..., - max_group_depth: int = ..., - _seen: set[int] | None = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, + _seen: set[int] | None = None, ) -> None: ... @classmethod def from_exception( @@ -146,11 +146,11 @@ class TracebackException: exc_value: BaseException, exc_traceback: TracebackType | None, *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., - compact: bool = ..., - _seen: set[int] | None = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + _seen: set[int] | None = None, ) -> None: ... 
@classmethod def from_exception( @@ -169,10 +169,10 @@ class TracebackException: exc_value: BaseException, exc_traceback: TracebackType | None, *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., - _seen: set[int] | None = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + _seen: set[int] | None = None, ) -> None: ... @classmethod def from_exception( @@ -181,14 +181,14 @@ class TracebackException: def __eq__(self, other: object) -> bool: ... if sys.version_info >= (3, 11): - def format(self, *, chain: bool = ..., _ctx: _ExceptionPrintContext | None = ...) -> Generator[str, None, None]: ... + def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: ... else: - def format(self, *, chain: bool = ...) -> Generator[str, None, None]: ... + def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... def format_exception_only(self) -> Generator[str, None, None]: ... if sys.version_info >= (3, 11): - def print(self, *, file: SupportsWrite[str] | None = ..., chain: bool = ...) -> None: ... + def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... class FrameSummary(Iterable[Any]): if sys.version_info >= (3, 11): @@ -198,12 +198,12 @@ class FrameSummary(Iterable[Any]): lineno: int | None, name: str, *, - lookup_line: bool = ..., - locals: Mapping[str, str] | None = ..., - line: str | None = ..., - end_lineno: int | None = ..., - colno: int | None = ..., - end_colno: int | None = ..., + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, + end_lineno: int | None = None, + colno: int | None = None, + end_colno: int | None = None, ) -> None: ... end_lineno: int | None colno: int | None @@ -215,9 +215,9 @@ class FrameSummary(Iterable[Any]): lineno: int | None, name: str, *, - lookup_line: bool = ..., - locals: Mapping[str, str] | None = ..., - line: str | None = ..., + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, ) -> None: ... filename: str lineno: int | None @@ -246,9 +246,9 @@ class StackSummary(list[FrameSummary]): cls, frame_gen: Iterable[tuple[FrameType, int]], *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, ) -> StackSummary: ... @classmethod def from_list(cls, a_list: Iterable[FrameSummary | _PT]) -> StackSummary: ... diff --git a/mypy/typeshed/stdlib/tracemalloc.pyi b/mypy/typeshed/stdlib/tracemalloc.pyi index ed952616600f..d7214de285f8 100644 --- a/mypy/typeshed/stdlib/tracemalloc.pyi +++ b/mypy/typeshed/stdlib/tracemalloc.pyi @@ -23,7 +23,12 @@ class Filter(BaseFilter): def filename_pattern(self) -> str: ... all_frames: bool def __init__( - self, inclusive: bool, filename_pattern: str, lineno: int | None = ..., all_frames: bool = ..., domain: int | None = ... + self, + inclusive: bool, + filename_pattern: str, + lineno: int | None = None, + all_frames: bool = False, + domain: int | None = None, ) -> None: ... class Statistic: @@ -80,11 +85,11 @@ class Traceback(Sequence[Frame]): if sys.version_info >= (3, 9): @property def total_nframe(self) -> int | None: ... - def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = ...) -> None: ... + def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = None) -> None: ... 
else: def __init__(self, frames: Sequence[_FrameTuple]) -> None: ... - def format(self, limit: int | None = ..., most_recent_first: bool = ...) -> list[str]: ... + def format(self, limit: int | None = None, most_recent_first: bool = False) -> list[str]: ... @overload def __getitem__(self, index: SupportsIndex) -> Frame: ... @overload @@ -104,11 +109,11 @@ class Traceback(Sequence[Frame]): class Snapshot: def __init__(self, traces: Sequence[_TraceTuple], traceback_limit: int) -> None: ... - def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = ...) -> list[StatisticDiff]: ... + def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: ... def dump(self, filename: str) -> None: ... def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: ... @staticmethod def load(filename: str) -> Snapshot: ... - def statistics(self, key_type: str, cumulative: bool = ...) -> list[Statistic]: ... + def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: ... traceback_limit: int traces: Sequence[Trace] diff --git a/mypy/typeshed/stdlib/tty.pyi b/mypy/typeshed/stdlib/tty.pyi index 8edae9ec2deb..43f2e1cf9087 100644 --- a/mypy/typeshed/stdlib/tty.pyi +++ b/mypy/typeshed/stdlib/tty.pyi @@ -15,5 +15,5 @@ if sys.platform != "win32": ISPEED: int OSPEED: int CC: int - def setraw(fd: _FD, when: int = ...) -> None: ... - def setcbreak(fd: _FD, when: int = ...) -> None: ... + def setraw(fd: _FD, when: int = 2) -> None: ... + def setcbreak(fd: _FD, when: int = 2) -> None: ... diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi index 13197c336e5e..1259ca6fb4cc 100644 --- a/mypy/typeshed/stdlib/turtle.pyi +++ b/mypy/typeshed/stdlib/turtle.pyi @@ -161,11 +161,11 @@ class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] hscroll: Scrollbar vscroll: Scrollbar def __init__( - self, master: Misc | None, width: int = ..., height: int = ..., canvwidth: int = ..., canvheight: int = ... + self, master: Misc | None, width: int = 500, height: int = 350, canvwidth: int = 600, canvheight: int = 500 ) -> None: ... canvwidth: int canvheight: int - def reset(self, canvwidth: int | None = ..., canvheight: int | None = ..., bg: str | None = ...) -> None: ... + def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: ... class TurtleScreenBase: cv: Canvas @@ -177,27 +177,27 @@ class TurtleScreenBase: def mainloop(self) -> None: ... def textinput(self, title: str, prompt: str) -> str | None: ... def numinput( - self, title: str, prompt: str, default: float | None = ..., minval: float | None = ..., maxval: float | None = ... + self, title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None ) -> float | None: ... class Terminator(Exception): ... class TurtleGraphicsError(Exception): ... class Shape: - def __init__(self, type_: str, data: _PolygonCoords | PhotoImage | None = ...) -> None: ... - def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = ...) -> None: ... + def __init__(self, type_: str, data: _PolygonCoords | PhotoImage | None = None) -> None: ... + def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: ... class TurtleScreen(TurtleScreenBase): - def __init__(self, cv: Canvas, mode: str = ..., colormode: float = ..., delay: int = ...) -> None: ... 
+ def __init__(self, cv: Canvas, mode: str = "standard", colormode: float = 1.0, delay: int = 10) -> None: ... def clear(self) -> None: ... @overload - def mode(self, mode: None = ...) -> str: ... + def mode(self, mode: None = None) -> str: ... @overload def mode(self, mode: str) -> None: ... def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: ... - def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = ...) -> None: ... + def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... @overload - def colormode(self, cmode: None = ...) -> float: ... + def colormode(self, cmode: None = None) -> float: ... @overload def colormode(self, cmode: float) -> None: ... def reset(self) -> None: ... @@ -209,11 +209,11 @@ class TurtleScreen(TurtleScreenBase): @overload def bgcolor(self, r: float, g: float, b: float) -> None: ... @overload - def tracer(self, n: None = ...) -> int: ... + def tracer(self, n: None = None) -> int: ... @overload - def tracer(self, n: int, delay: int | None = ...) -> None: ... + def tracer(self, n: int, delay: int | None = None) -> None: ... @overload - def delay(self, delay: None = ...) -> int: ... + def delay(self, delay: None = None) -> int: ... @overload def delay(self, delay: int) -> None: ... def update(self) -> None: ... @@ -221,24 +221,24 @@ class TurtleScreen(TurtleScreenBase): def window_height(self) -> int: ... def getcanvas(self) -> Canvas: ... def getshapes(self) -> list[str]: ... - def onclick(self, fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... def onkey(self, fun: Callable[[], object], key: str) -> None: ... - def listen(self, xdummy: float | None = ..., ydummy: float | None = ...) -> None: ... - def ontimer(self, fun: Callable[[], object], t: int = ...) -> None: ... + def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: ... + def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: ... @overload - def bgpic(self, picname: None = ...) -> str: ... + def bgpic(self, picname: None = None) -> str: ... @overload def bgpic(self, picname: str) -> None: ... @overload - def screensize(self, canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> tuple[int, int]: ... + def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well @overload - def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = ...) -> None: ... + def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape - def onkeypress(self, fun: Callable[[], object], key: str | None = ...) -> None: ... + def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: ... onkeyrelease = onkey class TNavigator: @@ -246,9 +246,9 @@ class TNavigator: DEFAULT_MODE: str DEFAULT_ANGLEOFFSET: int DEFAULT_ANGLEORIENT: int - def __init__(self, mode: str = ...) -> None: ... + def __init__(self, mode: str = "standard") -> None: ... def reset(self) -> None: ... - def degrees(self, fullcircle: float = ...) -> None: ... + def degrees(self, fullcircle: float = 360.0) -> None: ... def radians(self) -> None: ... def forward(self, distance: float) -> None: ... 
def back(self, distance: float) -> None: ... @@ -258,23 +258,23 @@ class TNavigator: def xcor(self) -> float: ... def ycor(self) -> float: ... @overload - def goto(self, x: tuple[float, float], y: None = ...) -> None: ... + def goto(self, x: tuple[float, float], y: None = None) -> None: ... @overload def goto(self, x: float, y: float) -> None: ... def home(self) -> None: ... def setx(self, x: float) -> None: ... def sety(self, y: float) -> None: ... @overload - def distance(self, x: TNavigator | tuple[float, float], y: None = ...) -> float: ... + def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def distance(self, x: float, y: float) -> float: ... @overload - def towards(self, x: TNavigator | tuple[float, float], y: None = ...) -> float: ... + def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def towards(self, x: float, y: float) -> float: ... def heading(self) -> float: ... def setheading(self, to_angle: float) -> None: ... - def circle(self, radius: float, extent: float | None = ..., steps: int | None = ...) -> None: ... + def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: ... fd = forward bk = back backward = back @@ -286,20 +286,20 @@ class TNavigator: seth = setheading class TPen: - def __init__(self, resizemode: str = ...) -> None: ... + def __init__(self, resizemode: str = "noresize") -> None: ... @overload - def resizemode(self, rmode: None = ...) -> str: ... + def resizemode(self, rmode: None = None) -> str: ... @overload def resizemode(self, rmode: str) -> None: ... @overload - def pensize(self, width: None = ...) -> int: ... + def pensize(self, width: None = None) -> int: ... @overload def pensize(self, width: int) -> None: ... def penup(self) -> None: ... def pendown(self) -> None: ... def isdown(self) -> bool: ... @overload - def speed(self, speed: None = ...) -> int: ... + def speed(self, speed: None = None) -> int: ... @overload def speed(self, speed: _Speed) -> None: ... @overload @@ -331,7 +331,7 @@ class TPen: @overload def pen( self, - pen: _PenState | None = ..., + pen: _PenState | None = None, *, shown: bool = ..., pendown: bool = ..., @@ -356,7 +356,11 @@ class RawTurtle(TPen, TNavigator): screen: TurtleScreen screens: ClassVar[list[TurtleScreen]] def __init__( - self, canvas: Canvas | TurtleScreen | None = ..., shape: str = ..., undobuffersize: int = ..., visible: bool = ... + self, + canvas: Canvas | TurtleScreen | None = None, + shape: str = "classic", + undobuffersize: int = 1000, + visible: bool = True, ) -> None: ... def reset(self) -> None: ... def setundobuffer(self, size: int | None) -> None: ... @@ -364,7 +368,7 @@ class RawTurtle(TPen, TNavigator): def clear(self) -> None: ... def clone(self: Self) -> Self: ... @overload - def shape(self, name: None = ...) -> str: ... + def shape(self, name: None = None) -> str: ... @overload def shape(self, name: str) -> None: ... # Unsafely overlaps when no arguments are provided @@ -372,10 +376,10 @@ class RawTurtle(TPen, TNavigator): def shapesize(self) -> tuple[float, float, float]: ... # type: ignore[misc] @overload def shapesize( - self, stretch_wid: float | None = ..., stretch_len: float | None = ..., outline: float | None = ... + self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None ) -> None: ... @overload - def shearfactor(self, shear: None = ...) -> float: ... + def shearfactor(self, shear: None = None) -> float: ... 
@overload def shearfactor(self, shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @@ -383,12 +387,12 @@ class RawTurtle(TPen, TNavigator): def shapetransform(self) -> tuple[float, float, float, float]: ... # type: ignore[misc] @overload def shapetransform( - self, t11: float | None = ..., t12: float | None = ..., t21: float | None = ..., t22: float | None = ... + self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... def get_shapepoly(self) -> _PolygonCoords | None: ... def settiltangle(self, angle: float) -> None: ... @overload - def tiltangle(self, angle: None = ...) -> float: ... + def tiltangle(self, angle: None = None) -> float: ... @overload def tiltangle(self, angle: float) -> None: ... def tilt(self, angle: float) -> None: ... @@ -397,21 +401,21 @@ class RawTurtle(TPen, TNavigator): # we return Any. def stamp(self) -> Any: ... def clearstamp(self, stampid: int | tuple[int, ...]) -> None: ... - def clearstamps(self, n: int | None = ...) -> None: ... + def clearstamps(self, n: int | None = None) -> None: ... def filling(self) -> bool: ... def begin_fill(self) -> None: ... def end_fill(self) -> None: ... - def dot(self, size: int | None = ..., *color: _Color) -> None: ... - def write(self, arg: object, move: bool = ..., align: str = ..., font: tuple[str, int, str] = ...) -> None: ... + def dot(self, size: int | None = None, *color: _Color) -> None: ... + def write(self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... def begin_poly(self) -> None: ... def end_poly(self) -> None: ... def get_poly(self) -> _PolygonCoords | None: ... def getscreen(self) -> TurtleScreen: ... def getturtle(self: Self) -> Self: ... getpen = getturtle - def onclick(self, fun: Callable[[float, float], object], btn: int = ..., add: bool | None = ...) -> None: ... - def onrelease(self, fun: Callable[[float, float], object], btn: int = ..., add: bool | None = ...) -> None: ... - def ondrag(self, fun: Callable[[float, float], object], btn: int = ..., add: bool | None = ...) -> None: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... def undo(self) -> None: ... turtlesize = shapesize @@ -420,22 +424,22 @@ class _Screen(TurtleScreen): # Note int and float are interpreted differently, hence the Union instead of just float def setup( self, - width: int | float = ..., # noqa: Y041 - height: int | float = ..., # noqa: Y041 - startx: int | None = ..., - starty: int | None = ..., + width: int | float = 0.5, # noqa: Y041 + height: int | float = 0.75, # noqa: Y041 + startx: int | None = None, + starty: int | None = None, ) -> None: ... def title(self, titlestring: str) -> None: ... def bye(self) -> None: ... def exitonclick(self) -> None: ... class Turtle(RawTurtle): - def __init__(self, shape: str = ..., undobuffersize: int = ..., visible: bool = ...) -> None: ... + def __init__(self, shape: str = "classic", undobuffersize: int = 1000, visible: bool = True) -> None: ... RawPen = RawTurtle Pen = Turtle -def write_docstringdict(filename: str = ...) -> None: ... +def write_docstringdict(filename: str = "turtle_docstringdict") -> None: ... 
# Note: it's somewhat unfortunate that we have to copy the function signatures. # It would be nice if we could partially reduce the redundancy by doing something @@ -453,20 +457,20 @@ def write_docstringdict(filename: str = ...) -> None: ... def mainloop() -> None: ... def textinput(title: str, prompt: str) -> str | None: ... def numinput( - title: str, prompt: str, default: float | None = ..., minval: float | None = ..., maxval: float | None = ... + title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None ) -> float | None: ... # Functions copied from TurtleScreen: def clear() -> None: ... @overload -def mode(mode: None = ...) -> str: ... +def mode(mode: None = None) -> str: ... @overload def mode(mode: str) -> None: ... def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: ... -def register_shape(name: str, shape: _PolygonCoords | Shape | None = ...) -> None: ... +def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... @overload -def colormode(cmode: None = ...) -> float: ... +def colormode(cmode: None = None) -> float: ... @overload def colormode(cmode: float) -> None: ... def reset() -> None: ... @@ -478,11 +482,11 @@ def bgcolor(color: _Color) -> None: ... @overload def bgcolor(r: float, g: float, b: float) -> None: ... @overload -def tracer(n: None = ...) -> int: ... +def tracer(n: None = None) -> int: ... @overload -def tracer(n: int, delay: int | None = ...) -> None: ... +def tracer(n: int, delay: int | None = None) -> None: ... @overload -def delay(delay: None = ...) -> int: ... +def delay(delay: None = None) -> int: ... @overload def delay(delay: int) -> None: ... def update() -> None: ... @@ -490,31 +494,31 @@ def window_width() -> int: ... def window_height() -> int: ... def getcanvas() -> Canvas: ... def getshapes() -> list[str]: ... -def onclick(fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... +def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... def onkey(fun: Callable[[], object], key: str) -> None: ... -def listen(xdummy: float | None = ..., ydummy: float | None = ...) -> None: ... -def ontimer(fun: Callable[[], object], t: int = ...) -> None: ... +def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: ... +def ontimer(fun: Callable[[], object], t: int = 0) -> None: ... @overload -def bgpic(picname: None = ...) -> str: ... +def bgpic(picname: None = None) -> str: ... @overload def bgpic(picname: str) -> None: ... @overload -def screensize(canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> tuple[int, int]: ... +def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... @overload -def screensize(canvwidth: int, canvheight: int, bg: _Color | None = ...) -> None: ... +def screensize(canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape -def onkeypress(fun: Callable[[], object], key: str | None = ...) -> None: ... +def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: ... onkeyrelease = onkey # Functions copied from _Screen: -def setup(width: float = ..., height: float = ..., startx: int | None = ..., starty: int | None = ...) -> None: ... +def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: ... 
def title(titlestring: str) -> None: ... def bye() -> None: ... def exitonclick() -> None: ... @@ -522,7 +526,7 @@ def Screen() -> _Screen: ... # Functions copied from TNavigator: -def degrees(fullcircle: float = ...) -> None: ... +def degrees(fullcircle: float = 360.0) -> None: ... def radians() -> None: ... def forward(distance: float) -> None: ... def back(distance: float) -> None: ... @@ -532,23 +536,23 @@ def pos() -> Vec2D: ... def xcor() -> float: ... def ycor() -> float: ... @overload -def goto(x: tuple[float, float], y: None = ...) -> None: ... +def goto(x: tuple[float, float], y: None = None) -> None: ... @overload def goto(x: float, y: float) -> None: ... def home() -> None: ... def setx(x: float) -> None: ... def sety(y: float) -> None: ... @overload -def distance(x: TNavigator | tuple[float, float], y: None = ...) -> float: ... +def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def distance(x: float, y: float) -> float: ... @overload -def towards(x: TNavigator | tuple[float, float], y: None = ...) -> float: ... +def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def towards(x: float, y: float) -> float: ... def heading() -> float: ... def setheading(to_angle: float) -> None: ... -def circle(radius: float, extent: float | None = ..., steps: int | None = ...) -> None: ... +def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: ... fd = forward bk = back @@ -562,18 +566,18 @@ seth = setheading # Functions copied from TPen: @overload -def resizemode(rmode: None = ...) -> str: ... +def resizemode(rmode: None = None) -> str: ... @overload def resizemode(rmode: str) -> None: ... @overload -def pensize(width: None = ...) -> int: ... +def pensize(width: None = None) -> int: ... @overload def pensize(width: int) -> None: ... def penup() -> None: ... def pendown() -> None: ... def isdown() -> bool: ... @overload -def speed(speed: None = ...) -> int: ... +def speed(speed: None = None) -> int: ... @overload def speed(speed: _Speed) -> None: ... @overload @@ -605,7 +609,7 @@ def isvisible() -> bool: ... def pen() -> _PenState: ... # type: ignore[misc] @overload def pen( - pen: _PenState | None = ..., + pen: _PenState | None = None, *, shown: bool = ..., pendown: bool = ..., @@ -632,7 +636,7 @@ ht = hideturtle def setundobuffer(size: int | None) -> None: ... def undobufferentries() -> int: ... @overload -def shape(name: None = ...) -> str: ... +def shape(name: None = None) -> str: ... @overload def shape(name: str) -> None: ... @@ -640,9 +644,9 @@ def shape(name: str) -> None: ... @overload def shapesize() -> tuple[float, float, float]: ... # type: ignore[misc] @overload -def shapesize(stretch_wid: float | None = ..., stretch_len: float | None = ..., outline: float | None = ...) -> None: ... +def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ... @overload -def shearfactor(shear: None = ...) -> float: ... +def shearfactor(shear: None = None) -> float: ... @overload def shearfactor(shear: float) -> None: ... @@ -651,12 +655,12 @@ def shearfactor(shear: float) -> None: ... def shapetransform() -> tuple[float, float, float, float]: ... # type: ignore[misc] @overload def shapetransform( - t11: float | None = ..., t12: float | None = ..., t21: float | None = ..., t22: float | None = ... + t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... 
def get_shapepoly() -> _PolygonCoords | None: ... def settiltangle(angle: float) -> None: ... @overload -def tiltangle(angle: None = ...) -> float: ... +def tiltangle(angle: None = None) -> float: ... @overload def tiltangle(angle: float) -> None: ... def tilt(angle: float) -> None: ... @@ -666,12 +670,12 @@ def tilt(angle: float) -> None: ... # we return Any. def stamp() -> Any: ... def clearstamp(stampid: int | tuple[int, ...]) -> None: ... -def clearstamps(n: int | None = ...) -> None: ... +def clearstamps(n: int | None = None) -> None: ... def filling() -> bool: ... def begin_fill() -> None: ... def end_fill() -> None: ... -def dot(size: int | None = ..., *color: _Color) -> None: ... -def write(arg: object, move: bool = ..., align: str = ..., font: tuple[str, int, str] = ...) -> None: ... +def dot(size: int | None = None, *color: _Color) -> None: ... +def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... def begin_poly() -> None: ... def end_poly() -> None: ... def get_poly() -> _PolygonCoords | None: ... @@ -680,8 +684,8 @@ def getturtle() -> Turtle: ... getpen = getturtle -def onrelease(fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... -def ondrag(fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... def undo() -> None: ... turtlesize = shapesize diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index e3e6418347b1..5fb24106685e 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -16,7 +16,7 @@ from collections.abc import ( from importlib.machinery import ModuleSpec # pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping -from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload # noqa: Y027 +from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload # noqa: Y022 from typing_extensions import Literal, ParamSpec, final __all__ = [ @@ -241,13 +241,13 @@ class CodeType: def replace( self, *, - co_argcount: int = ..., - co_posonlyargcount: int = ..., - co_kwonlyargcount: int = ..., - co_nlocals: int = ..., - co_stacksize: int = ..., - co_flags: int = ..., - co_firstlineno: int = ..., + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, co_code: bytes = ..., co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] = ..., @@ -264,13 +264,13 @@ class CodeType: def replace( self, *, - co_argcount: int = ..., - co_posonlyargcount: int = ..., - co_kwonlyargcount: int = ..., - co_nlocals: int = ..., - co_stacksize: int = ..., - co_flags: int = ..., - co_firstlineno: int = ..., + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, co_code: bytes = ..., co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] 
= ..., @@ -285,13 +285,13 @@ class CodeType: def replace( self, *, - co_argcount: int = ..., - co_posonlyargcount: int = ..., - co_kwonlyargcount: int = ..., - co_nlocals: int = ..., - co_stacksize: int = ..., - co_flags: int = ..., - co_firstlineno: int = ..., + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, co_code: bytes = ..., co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] = ..., @@ -310,7 +310,7 @@ class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): def __getitem__(self, __key: _KT) -> _VT_co: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... - def __eq__(self, other: object) -> bool: ... + def __eq__(self, __value: object) -> bool: ... def copy(self) -> dict[_KT, _VT_co]: ... def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... @@ -414,7 +414,7 @@ class _StaticFunctionType: # By wrapping FunctionType in _StaticFunctionType, we get the right result; # similar to wrapping a function in staticmethod() at runtime to prevent it # being bound as a method. - def __get__(self, obj: object | None, type: type | None) -> FunctionType: ... + def __get__(self, obj: object, type: type | None) -> FunctionType: ... @final class MethodType: @@ -555,12 +555,12 @@ class MemberDescriptorType: def new_class( name: str, bases: Iterable[object] = ..., - kwds: dict[str, Any] | None = ..., - exec_body: Callable[[dict[str, Any]], object] | None = ..., + kwds: dict[str, Any] | None = None, + exec_body: Callable[[dict[str, Any]], object] | None = None, ) -> type: ... def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... def prepare_class( - name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = ... + name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = None ) -> tuple[type, dict[str, Any], dict[str, Any]]: ... # Actually a different type, but `property` is special and we want that too. diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 71018003b6d9..eaa566582fb4 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -137,7 +137,7 @@ class TypeVar: __covariant__: bool __contravariant__: bool def __init__( - self, name: str, *constraints: Any, bound: Any | None = ..., covariant: bool = ..., contravariant: bool = ... + self, name: str, *constraints: Any, bound: Any | None = None, covariant: bool = False, contravariant: bool = False ) -> None: ... if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: ... @@ -215,7 +215,9 @@ if sys.version_info >= (3, 10): __bound__: Any | None __covariant__: bool __contravariant__: bool - def __init__(self, name: str, *, bound: Any | None = ..., contravariant: bool = ..., covariant: bool = ...) -> None: ... + def __init__( + self, name: str, *, bound: Any | None = None, contravariant: bool = False, covariant: bool = False + ) -> None: ... @property def args(self) -> ParamSpecArgs: ... @property @@ -360,11 +362,11 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): @overload @abstractmethod def throw( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None ) -> _T_co: ... 
@overload @abstractmethod - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> _T_co: ... def close(self) -> None: ... def __iter__(self) -> Generator[_T_co, _T_contra, _V_co]: ... @property @@ -397,11 +399,11 @@ class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]): @overload @abstractmethod def throw( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None ) -> _T_co: ... @overload @abstractmethod - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> _T_co: ... @abstractmethod def close(self) -> None: ... @@ -430,11 +432,11 @@ class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): @overload @abstractmethod def athrow( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None ) -> Awaitable[_T_co]: ... @overload @abstractmethod - def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> Awaitable[_T_co]: ... + def athrow(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> Awaitable[_T_co]: ... def aclose(self) -> Awaitable[None]: ... @property def ag_await(self) -> Any: ... @@ -465,7 +467,7 @@ class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]): @abstractmethod def __getitem__(self, index: slice) -> Sequence[_T_co]: ... # Mixin methods - def index(self, value: Any, start: int = ..., stop: int = ...) -> int: ... + def index(self, value: Any, start: int = 0, stop: int = ...) -> int: ... def count(self, value: Any) -> int: ... def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... @@ -497,7 +499,7 @@ class MutableSequence(Sequence[_T], Generic[_T]): def clear(self) -> None: ... def extend(self, values: Iterable[_T]) -> None: ... def reverse(self) -> None: ... - def pop(self, index: int = ...) -> _T: ... + def pop(self, index: int = -1) -> _T: ... def remove(self, value: _T) -> None: ... def __iadd__(self: _typeshed.Self, values: Iterable[_T]) -> _typeshed.Self: ... @@ -600,9 +602,13 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): def pop(self, __key: _KT, default: _VT | _T) -> _VT | _T: ... def popitem(self) -> tuple[_KT, _VT]: ... # This overload should be allowed only if the value type is compatible with None. - # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + # + # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences: + # -- collections.OrderedDict.setdefault + # -- collections.ChainMap.setdefault + # -- weakref.WeakKeyDictionary.setdefault @overload - def setdefault(self: MutableMapping[_KT, _T | None], __key: _KT) -> _T | None: ... + def setdefault(self: MutableMapping[_KT, _T | None], __key: _KT, __default: None = None) -> _T | None: ... @overload def setdefault(self, __key: _KT, __default: _VT) -> _VT: ... # 'update' used to take a Union, but using overloading is better. 
@@ -658,21 +664,21 @@ class IO(Iterator[AnyStr], Generic[AnyStr]): @abstractmethod def isatty(self) -> bool: ... @abstractmethod - def read(self, __n: int = ...) -> AnyStr: ... + def read(self, __n: int = -1) -> AnyStr: ... @abstractmethod def readable(self) -> bool: ... @abstractmethod - def readline(self, __limit: int = ...) -> AnyStr: ... + def readline(self, __limit: int = -1) -> AnyStr: ... @abstractmethod - def readlines(self, __hint: int = ...) -> list[AnyStr]: ... + def readlines(self, __hint: int = -1) -> list[AnyStr]: ... @abstractmethod - def seek(self, __offset: int, __whence: int = ...) -> int: ... + def seek(self, __offset: int, __whence: int = 0) -> int: ... @abstractmethod def seekable(self) -> bool: ... @abstractmethod def tell(self) -> int: ... @abstractmethod - def truncate(self, __size: int | None = ...) -> int: ... + def truncate(self, __size: int | None = None) -> int: ... @abstractmethod def writable(self) -> bool: ... @abstractmethod @@ -728,14 +734,14 @@ _get_type_hints_obj_allowed_types = ( # noqa: Y026 # TODO: Use TypeAlias once if sys.version_info >= (3, 9): def get_type_hints( obj: _get_type_hints_obj_allowed_types, - globalns: dict[str, Any] | None = ..., - localns: dict[str, Any] | None = ..., - include_extras: bool = ..., + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, ) -> dict[str, Any]: ... else: def get_type_hints( - obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = ..., localns: dict[str, Any] | None = ... + obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None ) -> dict[str, Any]: ... if sys.version_info >= (3, 8): @@ -757,9 +763,9 @@ if sys.version_info >= (3, 11): def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... def dataclass_transform( *, - eq_default: bool = ..., - order_default: bool = ..., - kw_only_default: bool = ..., + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., **kwargs: Any, ) -> IdentityFunction: ... @@ -821,9 +827,9 @@ class ForwardRef: __forward_module__: Any | None if sys.version_info >= (3, 9): # The module and is_class arguments were added in later Python 3.9 versions. - def __init__(self, arg: str, is_argument: bool = ..., module: Any | None = ..., *, is_class: bool = ...) -> None: ... + def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... else: - def __init__(self, arg: str, is_argument: bool = ...) -> None: ... + def __init__(self, arg: str, is_argument: bool = True) -> None: ... 
if sys.version_info >= (3, 9): def _evaluate( diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index df2c1c431c65..73a41f16600d 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -6,7 +6,7 @@ import typing from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Incomplete from collections.abc import Iterable -from typing import ( # noqa: Y022,Y027,Y039 +from typing import ( # noqa: Y022,Y039 TYPE_CHECKING as TYPE_CHECKING, Any as Any, AsyncContextManager as AsyncContextManager, @@ -151,9 +151,9 @@ OrderedDict = _Alias() def get_type_hints( obj: Callable[..., Any], - globalns: dict[str, Any] | None = ..., - localns: dict[str, Any] | None = ..., - include_extras: bool = ..., + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, ) -> dict[str, Any]: ... def get_args(tp: Any) -> tuple[Any, ...]: ... def get_origin(tp: Any) -> Any | None: ... @@ -224,9 +224,9 @@ else: def dataclass_transform( *, - eq_default: bool = ..., - order_default: bool = ..., - kw_only_default: bool = ..., + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., **kwargs: object, ) -> IdentityFunction: ... @@ -268,11 +268,11 @@ class TypeVar: self, name: str, *constraints: Any, - bound: Any | None = ..., - covariant: bool = ..., - contravariant: bool = ..., - default: Any | None = ..., - infer_variance: bool = ..., + bound: Any | None = None, + covariant: bool = False, + contravariant: bool = False, + default: Any | None = None, + infer_variance: bool = False, ) -> None: ... if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: ... @@ -291,10 +291,10 @@ class ParamSpec: self, name: str, *, - bound: None | type[Any] | str = ..., - contravariant: bool = ..., - covariant: bool = ..., - default: type[Any] | str | None = ..., + bound: None | type[Any] | str = None, + contravariant: bool = False, + covariant: bool = False, + default: type[Any] | str | None = None, ) -> None: ... @property def args(self) -> ParamSpecArgs: ... @@ -305,7 +305,7 @@ class ParamSpec: class TypeVarTuple: __name__: str __default__: Any | None - def __init__(self, name: str, *, default: Any | None = ...) -> None: ... + def __init__(self, name: str, *, default: Any | None = None) -> None: ... def __iter__(self) -> Any: ... # Unpack[Self] def override(__arg: _F) -> _F: ... diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 42633ed13bb8..5b1bd9288659 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -94,7 +94,7 @@ class TestCase: _testMethodName: str # undocumented _testMethodDoc: str - def __init__(self, methodName: str = ...) -> None: ... + def __init__(self, methodName: str = "runTest") -> None: ... def __eq__(self, other: object) -> bool: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... @@ -102,7 +102,7 @@ class TestCase: def setUpClass(cls) -> None: ... @classmethod def tearDownClass(cls) -> None: ... - def run(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... + def run(self, result: unittest.result.TestResult | None = None) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) 
-> unittest.result.TestResult | None: ... def skipTest(self, reason: Any) -> NoReturn: ... def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... @@ -110,34 +110,34 @@ class TestCase: if sys.version_info < (3, 11): def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... - def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def assertNotEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def assertTrue(self, expr: Any, msg: Any = ...) -> None: ... - def assertFalse(self, expr: Any, msg: Any = ...) -> None: ... - def assertIs(self, expr1: object, expr2: object, msg: Any = ...) -> None: ... - def assertIsNot(self, expr1: object, expr2: object, msg: Any = ...) -> None: ... - def assertIsNone(self, obj: object, msg: Any = ...) -> None: ... - def assertIsNotNone(self, obj: object, msg: Any = ...) -> None: ... - def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = ...) -> None: ... - def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = ...) -> None: ... - def assertIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = ...) -> None: ... - def assertNotIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = ...) -> None: ... + def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertTrue(self, expr: Any, msg: Any = None) -> None: ... + def assertFalse(self, expr: Any, msg: Any = None) -> None: ... + def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNone(self, obj: object, msg: Any = None) -> None: ... + def assertIsNotNone(self, obj: object, msg: Any = None) -> None: ... + def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = None) -> None: ... + def assertNotIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = None) -> None: ... @overload - def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = ...) -> None: ... + def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = ...) -> None: ... + def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = None) -> None: ... @overload - def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = ...) -> None: ... + def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = ...) -> None: ... + def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = None) -> None: ... @overload - def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = ...) -> None: ... + def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = ...) -> None: ... + def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... 
@overload - def assertLessEqual(self, a: SupportsDunderLT[_T], b: _T, msg: Any = ...) -> None: ... + def assertLessEqual(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertLessEqual(self, a: _T, b: SupportsDunderGT[_T], msg: Any = ...) -> None: ... + def assertLessEqual(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` # are not using `ParamSpec` intentionally, # because they might be used with explicitly wrong arg types to raise some error in tests. @@ -192,74 +192,74 @@ class TestCase: self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertWarnsContext: ... def assertLogs( - self, logger: str | logging.Logger | None = ..., level: int | str | None = ... + self, logger: str | logging.Logger | None = None, level: int | str | None = None ) -> _AssertLogsContext[_LoggingWatcher]: ... if sys.version_info >= (3, 10): def assertNoLogs( - self, logger: str | logging.Logger | None = ..., level: int | str | None = ... + self, logger: str | logging.Logger | None = None, level: int | str | None = None ) -> _AssertLogsContext[None]: ... @overload def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload def assertAlmostEqual( - self, first: _S, second: _S, places: None = ..., msg: Any = ..., *, delta: _SupportsAbsAndDunderGE + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE ) -> None: ... @overload def assertAlmostEqual( self, first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], second: _T, - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... @overload def assertAlmostEqual( self, first: _T, second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... @overload def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload def assertNotAlmostEqual( - self, first: _S, second: _S, places: None = ..., msg: Any = ..., *, delta: _SupportsAbsAndDunderGE + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE ) -> None: ... @overload def assertNotAlmostEqual( self, first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], second: _T, - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... @overload def assertNotAlmostEqual( self, first: _T, second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... - def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... - def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... - def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = ...) -> None: ... + def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... 
+ def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: ... def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: ... - def assertMultiLineEqual(self, first: str, second: str, msg: Any = ...) -> None: ... + def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: ... def assertSequenceEqual( - self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = ..., seq_type: type[Sequence[Any]] | None = ... + self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = None, seq_type: type[Sequence[Any]] | None = None ) -> None: ... - def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = ...) -> None: ... - def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = ...) -> None: ... - def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = ...) -> None: ... - def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = ...) -> None: ... - def fail(self, msg: Any = ...) -> NoReturn: ... + def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: ... + def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: ... + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: ... + def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = None) -> None: ... + def fail(self, msg: Any = None) -> NoReturn: ... def countTestCases(self) -> int: ... def defaultTestResult(self) -> unittest.result.TestResult: ... def id(self) -> str: ... @@ -302,16 +302,16 @@ class TestCase: assertNotRegexpMatches = assertNotRegex assertRaisesRegexp = assertRaisesRegex def assertDictContainsSubset( - self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = ... + self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None ) -> None: ... class FunctionTestCase(TestCase): def __init__( self, testFunc: Callable[[], Any], - setUp: Callable[[], Any] | None = ..., - tearDown: Callable[[], Any] | None = ..., - description: str | None = ..., + setUp: Callable[[], Any] | None = None, + tearDown: Callable[[], Any] | None = None, + description: str | None = None, ) -> None: ... def runTest(self) -> None: ... diff --git a/mypy/typeshed/stdlib/unittest/loader.pyi b/mypy/typeshed/stdlib/unittest/loader.pyi index a1b902e0f6d6..f3850c939d07 100644 --- a/mypy/typeshed/stdlib/unittest/loader.pyi +++ b/mypy/typeshed/stdlib/unittest/loader.pyi @@ -18,11 +18,13 @@ class TestLoader: testNamePatterns: list[str] | None suiteClass: _SuiteClass def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... - def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = ...) -> unittest.suite.TestSuite: ... - def loadTestsFromName(self, name: str, module: ModuleType | None = ...) -> unittest.suite.TestSuite: ... - def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = ...) -> unittest.suite.TestSuite: ... + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = None) -> unittest.suite.TestSuite: ... + def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ... 
+ def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ... def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... - def discover(self, start_dir: str, pattern: str = ..., top_level_dir: str | None = ...) -> unittest.suite.TestSuite: ... + def discover( + self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None + ) -> unittest.suite.TestSuite: ... def _match_path(self, path: str, full_path: str, pattern: str) -> bool: ... defaultTestLoader: TestLoader @@ -31,14 +33,14 @@ def getTestCaseNames( testCaseClass: type[unittest.case.TestCase], prefix: str, sortUsing: _SortComparisonMethod = ..., - testNamePatterns: list[str] | None = ..., + testNamePatterns: list[str] | None = None, ) -> Sequence[str]: ... def makeSuite( testCaseClass: type[unittest.case.TestCase], - prefix: str = ..., + prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ..., ) -> unittest.suite.TestSuite: ... def findTestCases( - module: ModuleType, prefix: str = ..., sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... + module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... ) -> unittest.suite.TestSuite: ... diff --git a/mypy/typeshed/stdlib/unittest/main.pyi b/mypy/typeshed/stdlib/unittest/main.pyi index 915d559cce5b..6d970c920096 100644 --- a/mypy/typeshed/stdlib/unittest/main.pyi +++ b/mypy/typeshed/stdlib/unittest/main.pyi @@ -25,23 +25,23 @@ class TestProgram: testNamePatterns: list[str] | None def __init__( self, - module: None | str | ModuleType = ..., - defaultTest: str | Iterable[str] | None = ..., - argv: list[str] | None = ..., - testRunner: type[_TestRunner] | _TestRunner | None = ..., + module: None | str | ModuleType = "__main__", + defaultTest: str | Iterable[str] | None = None, + argv: list[str] | None = None, + testRunner: type[_TestRunner] | _TestRunner | None = None, testLoader: unittest.loader.TestLoader = ..., - exit: bool = ..., - verbosity: int = ..., - failfast: bool | None = ..., - catchbreak: bool | None = ..., - buffer: bool | None = ..., - warnings: str | None = ..., + exit: bool = True, + verbosity: int = 1, + failfast: bool | None = None, + catchbreak: bool | None = None, + buffer: bool | None = None, + warnings: str | None = None, *, - tb_locals: bool = ..., + tb_locals: bool = False, ) -> None: ... - def usageExit(self, msg: Any = ...) -> None: ... + def usageExit(self, msg: Any = None) -> None: ... def parseArgs(self, argv: list[str]) -> None: ... - def createTests(self, from_discovery: bool = ..., Loader: unittest.loader.TestLoader | None = ...) -> None: ... + def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ... def runTests(self) -> None: ... # undocumented main = TestProgram diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 47535499a9f2..54c79fd433d2 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -70,16 +70,21 @@ class _Call(tuple[Any, ...]): def __new__( cls: type[Self], value: _CallValue = ..., - name: str | None = ..., - parent: Any | None = ..., - two: bool = ..., - from_kall: bool = ..., + name: str | None = "", + parent: Any | None = None, + two: bool = False, + from_kall: bool = True, ) -> Self: ... 
name: Any parent: Any from_kall: Any def __init__( - self, value: _CallValue = ..., name: str | None = ..., parent: Any | None = ..., two: bool = ..., from_kall: bool = ... + self, + value: _CallValue = ..., + name: str | None = None, + parent: Any | None = None, + two: bool = False, + from_kall: bool = True, ) -> None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... @@ -106,17 +111,17 @@ class NonCallableMock(Base, Any): def __new__(__cls: type[Self], *args: Any, **kw: Any) -> Self: ... def __init__( self, - spec: list[str] | object | type[object] | None = ..., - wraps: Any | None = ..., - name: str | None = ..., - spec_set: list[str] | object | type[object] | None = ..., - parent: NonCallableMock | None = ..., - _spec_state: Any | None = ..., - _new_name: str = ..., - _new_parent: NonCallableMock | None = ..., - _spec_as_instance: bool = ..., - _eat_self: bool | None = ..., - unsafe: bool = ..., + spec: list[str] | object | type[object] | None = None, + wraps: Any | None = None, + name: str | None = None, + spec_set: list[str] | object | type[object] | None = None, + parent: NonCallableMock | None = None, + _spec_state: Any | None = None, + _new_name: str = "", + _new_parent: NonCallableMock | None = None, + _spec_as_instance: bool = False, + _eat_self: bool | None = None, + unsafe: bool = False, **kwargs: Any, ) -> None: ... def __getattr__(self, name: str) -> Any: ... @@ -124,11 +129,11 @@ class NonCallableMock(Base, Any): def __setattr__(self, name: str, value: Any) -> None: ... def __dir__(self) -> list[str]: ... if sys.version_info >= (3, 8): - def _calls_repr(self, prefix: str = ...) -> str: ... + def _calls_repr(self, prefix: str = "Calls") -> str: ... def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ... def assert_not_called(self) -> None: ... def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ... - def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = ...) -> str: ... + def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = "call") -> str: ... else: def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... def assert_not_called(_mock_self) -> None: ... @@ -141,13 +146,13 @@ class NonCallableMock(Base, Any): def assert_called(_mock_self) -> None: ... def assert_called_once(_mock_self) -> None: ... - def reset_mock(self, visited: Any = ..., *, return_value: bool = ..., side_effect: bool = ...) -> None: ... + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... def _extract_mock_name(self) -> str: ... def _get_call_signature_from_name(self, name: str) -> Any: ... def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... - def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = ...) -> None: ... - def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... - def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = ..., _eat_self: bool = ...) -> None: ... + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: ... + def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: ... + def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = False, _eat_self: bool = False) -> None: ... def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ... def configure_mock(self, **kwargs: Any) -> None: ... 
return_value: Any @@ -165,16 +170,16 @@ class CallableMixin(Base): side_effect: Any def __init__( self, - spec: Any | None = ..., - side_effect: Any | None = ..., + spec: Any | None = None, + side_effect: Any | None = None, return_value: Any = ..., - wraps: Any | None = ..., - name: Any | None = ..., - spec_set: Any | None = ..., - parent: Any | None = ..., - _spec_state: Any | None = ..., - _new_name: Any = ..., - _new_parent: Any | None = ..., + wraps: Any | None = None, + name: Any | None = None, + spec_set: Any | None = None, + parent: Any | None = None, + _spec_state: Any | None = None, + _new_name: Any = "", + _new_parent: Any | None = None, **kwargs: Any, ) -> None: ... if sys.version_info >= (3, 8): @@ -212,7 +217,7 @@ class _patch(Generic[_T]): new_callable: Any | None, kwargs: Mapping[str, Any], *, - unsafe: bool = ..., + unsafe: bool = False, ) -> None: ... else: def __init__( @@ -258,7 +263,7 @@ class _patch_dict: in_dict: Any values: Any clear: Any - def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... + def __init__(self, in_dict: Any, values: Any = ..., clear: Any = False, **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... if sys.version_info >= (3, 10): def decorate_callable(self, f: _F) -> _F: ... @@ -361,7 +366,7 @@ if sys.version_info >= (3, 8): def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ... def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: ... def assert_any_await(self, *args: Any, **kwargs: Any) -> None: ... - def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = ...) -> None: ... + def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: ... def assert_not_awaited(self) -> None: ... def reset_mock(self, *args: Any, **kwargs: Any) -> None: ... await_count: int @@ -381,7 +386,7 @@ class MagicProxy: def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def create_mock(self) -> Any: ... - def __get__(self, obj: Any, _type: Any | None = ...) -> Any: ... + def __get__(self, obj: Any, _type: Any | None = None) -> Any: ... class _ANY: def __eq__(self, other: object) -> Literal[True]: ... @@ -392,18 +397,23 @@ ANY: Any if sys.version_info >= (3, 10): def create_autospec( spec: Any, - spec_set: Any = ..., - instance: Any = ..., - _parent: Any | None = ..., - _name: Any | None = ..., + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, *, - unsafe: bool = ..., + unsafe: bool = False, **kwargs: Any, ) -> Any: ... else: def create_autospec( - spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Any | None = ..., _name: Any | None = ..., **kwargs: Any + spec: Any, + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, + **kwargs: Any, ) -> Any: ... class _SpecState: @@ -416,18 +426,18 @@ class _SpecState: def __init__( self, spec: Any, - spec_set: Any = ..., - parent: Any | None = ..., - name: Any | None = ..., - ids: Any | None = ..., - instance: Any = ..., + spec_set: Any = False, + parent: Any | None = None, + name: Any | None = None, + ids: Any | None = None, + instance: Any = False, ) -> None: ... -def mock_open(mock: Any | None = ..., read_data: Any = ...) -> Any: ... +def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: ... class PropertyMock(Mock): if sys.version_info >= (3, 8): - def __get__(self: Self, obj: _T, obj_type: type[_T] | None = ...) -> Self: ... 
+ def __get__(self: Self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... else: def __get__(self: Self, obj: _T, obj_type: type[_T] | None) -> Self: ... diff --git a/mypy/typeshed/stdlib/unittest/result.pyi b/mypy/typeshed/stdlib/unittest/result.pyi index 5dfec13cb52c..8d78bc0f7dcf 100644 --- a/mypy/typeshed/stdlib/unittest/result.pyi +++ b/mypy/typeshed/stdlib/unittest/result.pyi @@ -22,7 +22,7 @@ class TestResult: buffer: bool failfast: bool tb_locals: bool - def __init__(self, stream: TextIO | None = ..., descriptions: bool | None = ..., verbosity: int | None = ...) -> None: ... + def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... def printErrors(self) -> None: ... def wasSuccessful(self) -> bool: ... def stop(self) -> None: ... diff --git a/mypy/typeshed/stdlib/unittest/runner.pyi b/mypy/typeshed/stdlib/unittest/runner.pyi index 17514828898a..c0ddcdb49208 100644 --- a/mypy/typeshed/stdlib/unittest/runner.pyi +++ b/mypy/typeshed/stdlib/unittest/runner.pyi @@ -22,15 +22,15 @@ class TextTestRunner: resultclass: _ResultClassType def __init__( self, - stream: TextIO | None = ..., - descriptions: bool = ..., - verbosity: int = ..., - failfast: bool = ..., - buffer: bool = ..., - resultclass: _ResultClassType | None = ..., - warnings: type[Warning] | None = ..., + stream: TextIO | None = None, + descriptions: bool = True, + verbosity: int = 1, + failfast: bool = False, + buffer: bool = False, + resultclass: _ResultClassType | None = None, + warnings: type[Warning] | None = None, *, - tb_locals: bool = ..., + tb_locals: bool = False, ) -> None: ... def _makeResult(self) -> unittest.result.TestResult: ... def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ... diff --git a/mypy/typeshed/stdlib/unittest/signals.pyi b/mypy/typeshed/stdlib/unittest/signals.pyi index 89e108d926a6..a60133ada9d9 100644 --- a/mypy/typeshed/stdlib/unittest/signals.pyi +++ b/mypy/typeshed/stdlib/unittest/signals.pyi @@ -10,6 +10,6 @@ def installHandler() -> None: ... def registerResult(result: unittest.result.TestResult) -> None: ... def removeResult(result: unittest.result.TestResult) -> bool: ... @overload -def removeHandler(method: None = ...) -> None: ... +def removeHandler(method: None = None) -> None: ... @overload def removeHandler(method: Callable[_P, _T]) -> Callable[_P, _T]: ... diff --git a/mypy/typeshed/stdlib/unittest/suite.pyi b/mypy/typeshed/stdlib/unittest/suite.pyi index 26bef658f1cd..f6b8ef003518 100644 --- a/mypy/typeshed/stdlib/unittest/suite.pyi +++ b/mypy/typeshed/stdlib/unittest/suite.pyi @@ -19,4 +19,4 @@ class BaseTestSuite(Iterable[_TestType]): def __eq__(self, other: object) -> bool: ... class TestSuite(BaseTestSuite): - def run(self, result: unittest.result.TestResult, debug: bool = ...) -> unittest.result.TestResult: ... + def run(self, result: unittest.result.TestResult, debug: bool = False) -> unittest.result.TestResult: ... diff --git a/mypy/typeshed/stdlib/unittest/util.pyi b/mypy/typeshed/stdlib/unittest/util.pyi index f62c728760ff..845accfebedd 100644 --- a/mypy/typeshed/stdlib/unittest/util.pyi +++ b/mypy/typeshed/stdlib/unittest/util.pyi @@ -14,7 +14,7 @@ _MIN_DIFF_LEN: int def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... -def safe_repr(obj: object, short: bool = ...) -> str: ... +def safe_repr(obj: object, short: bool = False) -> str: ... def strclass(cls: type) -> str: ... 
def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... def unorderable_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... diff --git a/mypy/typeshed/stdlib/urllib/error.pyi b/mypy/typeshed/stdlib/urllib/error.pyi index 7a4de10d7cf6..8ea25680f1a4 100644 --- a/mypy/typeshed/stdlib/urllib/error.pyi +++ b/mypy/typeshed/stdlib/urllib/error.pyi @@ -6,7 +6,7 @@ __all__ = ["URLError", "HTTPError", "ContentTooShortError"] class URLError(IOError): reason: str | BaseException - def __init__(self, reason: str | BaseException, filename: str | None = ...) -> None: ... + def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... class HTTPError(URLError, addinfourl): @property diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index 8fe5d8b37ac0..cd1d9347d6f7 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -1,6 +1,7 @@ import sys from collections.abc import Callable, Iterable, Mapping, Sequence from typing import Any, AnyStr, Generic, NamedTuple, TypeVar, overload +from typing_extensions import Literal, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -43,10 +44,10 @@ class _ResultMixinBase(Generic[AnyStr]): def geturl(self) -> AnyStr: ... class _ResultMixinStr(_ResultMixinBase[str]): - def encode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinBytes: ... + def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ... class _ResultMixinBytes(_ResultMixinBase[bytes]): - def decode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinStr: ... + def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ... class _NetlocResultMixinBase(Generic[AnyStr]): @property @@ -115,73 +116,102 @@ class ParseResultBytes(_ParseResultBytesBase, _NetlocResultMixinBytes): ... def parse_qs( qs: AnyStr | None, - keep_blank_values: bool = ..., - strict_parsing: bool = ..., - encoding: str = ..., - errors: str = ..., - max_num_fields: int | None = ..., - separator: str = ..., + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", ) -> dict[AnyStr, list[AnyStr]]: ... def parse_qsl( qs: AnyStr | None, - keep_blank_values: bool = ..., - strict_parsing: bool = ..., - encoding: str = ..., - errors: str = ..., - max_num_fields: int | None = ..., - separator: str = ..., + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", ) -> list[tuple[AnyStr, AnyStr]]: ... @overload -def quote(string: str, safe: str | Iterable[int] = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: ... @overload -def quote(string: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... -def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... +def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... 
@overload -def quote_plus(string: str, safe: str | Iterable[int] = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ... @overload -def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... +def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... if sys.version_info >= (3, 9): - def unquote(string: str | bytes, encoding: str = ..., errors: str = ...) -> str: ... + def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... else: - def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ... + def unquote(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... -def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ... +def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... @overload def urldefrag(url: str) -> DefragResult: ... @overload def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... _Q = TypeVar("_Q", bound=str | Iterable[int]) +_QueryType: TypeAlias = ( + Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]] +) +@overload +def urlencode( + query: _QueryType, + doseq: bool = False, + safe: str = "", + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, str, str, str], str] = ..., +) -> str: ... +@overload +def urlencode( + query: _QueryType, + doseq: bool, + safe: _Q, + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., +) -> str: ... +@overload def urlencode( - query: Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]], + query: _QueryType, doseq: bool = False, - safe: _Q = ..., + *, + safe: _Q, encoding: str | None = None, errors: str | None = None, quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., ) -> str: ... -def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = ...) -> AnyStr: ... +def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ... +@overload +def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ... @overload -def urlparse(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> ParseResult: ... +def urlparse(url: bytes | bytearray, scheme: bytes | bytearray | None, allow_fragments: bool = True) -> ParseResultBytes: ... @overload def urlparse( - url: bytes | bytearray | None, scheme: bytes | bytearray | None = ..., allow_fragments: bool = ... + url: None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> ParseResultBytes: ... @overload -def urlsplit(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> SplitResult: ... +def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ... if sys.version_info >= (3, 11): @overload - def urlsplit(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... + def urlsplit(url: bytes, scheme: bytes | None, allow_fragments: bool = True) -> SplitResultBytes: ... 
+ @overload + def urlsplit(url: None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True) -> SplitResultBytes: ... else: + @overload + def urlsplit(url: bytes | bytearray, scheme: bytes | bytearray | None, allow_fragments: bool = True) -> SplitResultBytes: ... @overload def urlsplit( - url: bytes | bytearray | None, scheme: bytes | bytearray | None = ..., allow_fragments: bool = ... + url: None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> SplitResultBytes: ... @overload diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 00c160293762..09ce27961999 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -54,13 +54,13 @@ _DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | def urlopen( url: str | Request, - data: _DataType | None = ..., + data: _DataType | None = None, timeout: float | None = ..., *, - cafile: str | None = ..., - capath: str | None = ..., - cadefault: bool = ..., - context: ssl.SSLContext | None = ..., + cafile: str | None = None, + capath: str | None = None, + cadefault: bool = False, + context: ssl.SSLContext | None = None, ) -> _UrlopenRet: ... def install_opener(opener: OpenerDirector) -> None: ... def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: ... @@ -79,7 +79,7 @@ if sys.platform == "win32" or sys.platform == "darwin": def proxy_bypass(host: str) -> Any: ... # undocumented else: - def proxy_bypass(host: str, proxies: Mapping[str, str] | None = ...) -> Any: ... # undocumented + def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: ... # undocumented class Request: @property @@ -101,11 +101,11 @@ class Request: def __init__( self, url: str, - data: _DataType = ..., + data: _DataType = None, headers: MutableMapping[str, str] = ..., - origin_req_host: str | None = ..., - unverifiable: bool = ..., - method: str | None = ..., + origin_req_host: str | None = None, + unverifiable: bool = False, + method: str | None = None, ) -> None: ... def get_method(self) -> str: ... def add_header(self, key: str, val: str) -> None: ... @@ -124,7 +124,7 @@ class Request: class OpenerDirector: addheaders: list[tuple[str, str]] def add_handler(self, handler: BaseHandler) -> None: ... - def open(self, fullurl: str | Request, data: _DataType = ..., timeout: float | None = ...) -> _UrlopenRet: ... + def open(self, fullurl: str | Request, data: _DataType = None, timeout: float | None = ...) -> _UrlopenRet: ... def error(self, proto: str, *args: Any) -> _UrlopenRet: ... def close(self) -> None: ... @@ -158,14 +158,14 @@ class HTTPRedirectHandler(BaseHandler): class HTTPCookieProcessor(BaseHandler): cookiejar: CookieJar - def __init__(self, cookiejar: CookieJar | None = ...) -> None: ... + def __init__(self, cookiejar: CookieJar | None = None) -> None: ... def http_request(self, request: Request) -> Request: ... # undocumented def http_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented def https_request(self, request: Request) -> Request: ... # undocumented def https_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented class ProxyHandler(BaseHandler): - def __init__(self, proxies: dict[str, str] | None = ...) -> None: ... + def __init__(self, proxies: dict[str, str] | None = None) -> None: ... 
def proxy_open(self, req: Request, proxy: str, type: str) -> _UrlopenRet | None: ... # undocumented # TODO add a method for every (common) proxy protocol @@ -173,7 +173,7 @@ class HTTPPasswordMgr: def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... def is_suburi(self, base: str, test: str) -> bool: ... # undocumented - def reduce_uri(self, uri: str, default_port: bool = ...) -> str: ... # undocumented + def reduce_uri(self, uri: str, default_port: bool = True) -> str: ... # undocumented class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... @@ -181,16 +181,16 @@ class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): def add_password( - self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = ... + self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = False ) -> None: ... - def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = ...) -> None: ... + def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = False) -> None: ... def is_authenticated(self, authuri: str) -> bool: ... class AbstractBasicAuthHandler: rx: ClassVar[Pattern[str]] # undocumented passwd: HTTPPasswordMgr add_password: Callable[[str, str | Sequence[str], str, str], None] - def __init__(self, password_mgr: HTTPPasswordMgr | None = ...) -> None: ... + def __init__(self, password_mgr: HTTPPasswordMgr | None = None) -> None: ... def http_error_auth_reqed(self, authreq: str, host: str, req: Request, headers: HTTPMessage) -> None: ... def http_request(self, req: Request) -> Request: ... # undocumented def http_response(self, req: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented @@ -207,7 +207,7 @@ class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): def http_error_407(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... class AbstractDigestAuthHandler: - def __init__(self, passwd: HTTPPasswordMgr | None = ...) -> None: ... + def __init__(self, passwd: HTTPPasswordMgr | None = None) -> None: ... def reset_retry_count(self) -> None: ... def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: HTTPMessage) -> None: ... def retry_http_digest_auth(self, req: Request, auth: str) -> _UrlopenRet | None: ... @@ -235,7 +235,7 @@ class _HTTPConnectionProtocol(Protocol): ) -> HTTPConnection: ... class AbstractHTTPHandler(BaseHandler): # undocumented - def __init__(self, debuglevel: int = ...) -> None: ... + def __init__(self, debuglevel: int = 0) -> None: ... def set_http_debuglevel(self, level: int) -> None: ... def do_request_(self, request: Request) -> Request: ... def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... @@ -246,7 +246,7 @@ class HTTPHandler(AbstractHTTPHandler): class HTTPSHandler(AbstractHTTPHandler): def __init__( - self, debuglevel: int = ..., context: ssl.SSLContext | None = ..., check_hostname: bool | None = ... + self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None ) -> None: ... def https_open(self, req: Request) -> HTTPResponse: ... 
def https_request(self, request: Request) -> Request: ... # undocumented @@ -262,7 +262,7 @@ class DataHandler(BaseHandler): class ftpwrapper: # undocumented def __init__( - self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = ..., persistent: bool = ... + self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = None, persistent: bool = True ) -> None: ... def close(self) -> None: ... def endtransfer(self) -> None: ... @@ -292,59 +292,59 @@ class HTTPErrorProcessor(BaseHandler): def urlretrieve( url: str, - filename: StrOrBytesPath | None = ..., - reporthook: Callable[[int, int, int], object] | None = ..., - data: _DataType = ..., + filename: StrOrBytesPath | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: _DataType = None, ) -> tuple[str, HTTPMessage]: ... def urlcleanup() -> None: ... class URLopener: version: ClassVar[str] - def __init__(self, proxies: dict[str, str] | None = ..., **x509: str) -> None: ... - def open(self, fullurl: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... - def open_unknown(self, fullurl: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... + def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... + def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... def retrieve( self, url: str, - filename: str | None = ..., - reporthook: Callable[[int, int, int], object] | None = ..., - data: ReadableBuffer | None = ..., + filename: str | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: ReadableBuffer | None = None, ) -> tuple[str, Message | None]: ... def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented def cleanup(self) -> None: ... # undocumented def close(self) -> None: ... # undocumented def http_error( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None ) -> _UrlopenRet: ... # undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> _UrlopenRet: ... # undocumented - def open_data(self, url: str, data: ReadableBuffer | None = ...) -> addinfourl: ... # undocumented + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... # undocumented def open_file(self, url: str) -> addinfourl: ... # undocumented def open_ftp(self, url: str) -> addinfourl: ... # undocumented - def open_http(self, url: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... # undocumented - def open_https(self, url: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented def open_local_file(self, url: str) -> addinfourl: ... # undocumented - def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = ...) -> None: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented class FancyURLopener(URLopener): def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... 
- def get_user_passwd(self, host: str, realm: str, clear_cache: int = ...) -> tuple[str, str]: ... # undocumented + def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented def http_error_301( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_302( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_303( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_307( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented if sys.version_info >= (3, 11): def http_error_308( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_401( @@ -354,8 +354,8 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: ReadableBuffer | None = ..., - retry: bool = ..., + data: ReadableBuffer | None = None, + retry: bool = False, ) -> _UrlopenRet | None: ... # undocumented def http_error_407( self, @@ -364,8 +364,8 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: ReadableBuffer | None = ..., - retry: bool = ..., + data: ReadableBuffer | None = None, + retry: bool = False, ) -> _UrlopenRet | None: ... # undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage @@ -374,14 +374,14 @@ class FancyURLopener(URLopener): self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None ) -> _UrlopenRet | None: ... # undocumented def retry_http_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = None ) -> _UrlopenRet | None: ... # undocumented def retry_https_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = None ) -> _UrlopenRet | None: ... # undocumented def retry_proxy_http_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = None ) -> _UrlopenRet | None: ... # undocumented def retry_proxy_https_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = None ) -> _UrlopenRet | None: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/urllib/response.pyi b/mypy/typeshed/stdlib/urllib/response.pyi index ca9781dbfbb4..4db1b5649c7a 100644 --- a/mypy/typeshed/stdlib/urllib/response.pyi +++ b/mypy/typeshed/stdlib/urllib/response.pyi @@ -53,6 +53,6 @@ class addinfourl(addinfo): @property def status(self) -> int | None: ... - def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = ...) -> None: ... + def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = None) -> None: ... def geturl(self) -> str: ... def getcode(self) -> int | None: ... diff --git a/mypy/typeshed/stdlib/urllib/robotparser.pyi b/mypy/typeshed/stdlib/urllib/robotparser.pyi index 795cf83fcecd..d218c3dc6c0f 100644 --- a/mypy/typeshed/stdlib/urllib/robotparser.pyi +++ b/mypy/typeshed/stdlib/urllib/robotparser.pyi @@ -9,7 +9,7 @@ class RequestRate(NamedTuple): seconds: int class RobotFileParser: - def __init__(self, url: str = ...) -> None: ... + def __init__(self, url: str = "") -> None: ... def set_url(self, url: str) -> None: ... def read(self) -> None: ... def parse(self, lines: Iterable[str]) -> None: ... diff --git a/mypy/typeshed/stdlib/uu.pyi b/mypy/typeshed/stdlib/uu.pyi index 95a7f3dfa9e2..20e79bf3fec9 100644 --- a/mypy/typeshed/stdlib/uu.pyi +++ b/mypy/typeshed/stdlib/uu.pyi @@ -7,5 +7,7 @@ _File: TypeAlias = str | BinaryIO class Error(Exception): ... -def encode(in_file: _File, out_file: _File, name: str | None = ..., mode: int | None = ..., *, backtick: bool = ...) -> None: ... -def decode(in_file: _File, out_file: _File | None = ..., mode: int | None = ..., quiet: int = ...) -> None: ... +def encode( + in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False +) -> None: ... +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: int = False) -> None: ... diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi index 3d9b89a0b9f7..249257783626 100644 --- a/mypy/typeshed/stdlib/uuid.pyi +++ b/mypy/typeshed/stdlib/uuid.pyi @@ -1,3 +1,5 @@ +import sys +from _typeshed import Unused from enum import Enum from typing_extensions import TypeAlias @@ -14,12 +16,12 @@ class SafeUUID(Enum): class UUID: def __init__( self, - hex: str | None = ..., - bytes: _Bytes | None = ..., - bytes_le: _Bytes | None = ..., - fields: _FieldsType | None = ..., - int: _Int | None = ..., - version: _Int | None = ..., + hex: str | None = None, + bytes: _Bytes | None = None, + bytes_le: _Bytes | None = None, + fields: _FieldsType | None = None, + int: _Int | None = None, + version: _Int | None = None, *, is_safe: SafeUUID = ..., ) -> None: ... @@ -64,8 +66,13 @@ class UUID: def __gt__(self, other: UUID) -> bool: ... def __ge__(self, other: UUID) -> bool: ... -def getnode() -> int: ... -def uuid1(node: _Int | None = ..., clock_seq: _Int | None = ...) -> UUID: ... +if sys.version_info >= (3, 9): + def getnode() -> int: ... + +else: + def getnode(*, getters: Unused = None) -> int: ... # undocumented + +def uuid1(node: _Int | None = None, clock_seq: _Int | None = None) -> UUID: ... def uuid3(namespace: UUID, name: str) -> UUID: ... def uuid4() -> UUID: ... def uuid5(namespace: UUID, name: str) -> UUID: ... 
diff --git a/mypy/typeshed/stdlib/venv/__init__.pyi b/mypy/typeshed/stdlib/venv/__init__.pyi index dfa0b69b0870..f184649f10f0 100644 --- a/mypy/typeshed/stdlib/venv/__init__.pyi +++ b/mypy/typeshed/stdlib/venv/__init__.pyi @@ -20,23 +20,23 @@ class EnvBuilder: if sys.version_info >= (3, 9): def __init__( self, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - upgrade: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., - upgrade_deps: bool = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + upgrade: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, ) -> None: ... else: def __init__( self, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - upgrade: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + upgrade: bool = False, + with_pip: bool = False, + prompt: str | None = None, ) -> None: ... def create(self, env_dir: StrOrBytesPath) -> None: ... @@ -44,7 +44,7 @@ class EnvBuilder: def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: ... def create_configuration(self, context: SimpleNamespace) -> None: ... def symlink_or_copy( - self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = ... + self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = False ) -> None: ... # undocumented def setup_python(self, context: SimpleNamespace) -> None: ... def _setup_pip(self, context: SimpleNamespace) -> None: ... # undocumented @@ -58,22 +58,22 @@ class EnvBuilder: if sys.version_info >= (3, 9): def create( env_dir: StrOrBytesPath, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., - upgrade_deps: bool = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, ) -> None: ... else: def create( env_dir: StrOrBytesPath, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + with_pip: bool = False, + prompt: str | None = None, ) -> None: ... -def main(args: Sequence[str] | None = ...) -> None: ... +def main(args: Sequence[str] | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/warnings.pyi b/mypy/typeshed/stdlib/warnings.pyi index 5cc6b946409b..6222eb65918a 100644 --- a/mypy/typeshed/stdlib/warnings.pyi +++ b/mypy/typeshed/stdlib/warnings.pyi @@ -22,18 +22,20 @@ _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module filters: Sequence[tuple[str, str | None, type[Warning], str | None, int]] # undocumented, do not mutate def showwarning( - message: Warning | str, category: type[Warning], filename: str, lineno: int, file: TextIO | None = ..., line: str | None = ... + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: TextIO | None = None, + line: str | None = None, ) -> None: ... -def formatwarning(message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = ...) -> str: ... +def formatwarning( + message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None +) -> str: ... 
def filterwarnings( - action: _ActionKind, - message: str = ..., - category: type[Warning] = ..., - module: str = ..., - lineno: int = ..., - append: bool = ..., + action: _ActionKind, message: str = "", category: type[Warning] = ..., module: str = "", lineno: int = 0, append: bool = False ) -> None: ... -def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = ..., append: bool = ...) -> None: ... +def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: ... def resetwarnings() -> None: ... class _OptionError(Exception): ... @@ -52,9 +54,9 @@ class WarningMessage: category: type[Warning], filename: str, lineno: int, - file: TextIO | None = ..., - line: str | None = ..., - source: Any | None = ..., + file: TextIO | None = None, + line: str | None = None, + source: Any | None = None, ) -> None: ... class catch_warnings(Generic[_W]): @@ -63,45 +65,45 @@ class catch_warnings(Generic[_W]): def __init__( self: catch_warnings[None], *, - record: Literal[False] = ..., - module: ModuleType | None = ..., - action: _ActionKind | None = ..., + record: Literal[False] = False, + module: ModuleType | None = None, + action: _ActionKind | None = None, category: type[Warning] = ..., - lineno: int = ..., - append: bool = ..., + lineno: int = 0, + append: bool = False, ) -> None: ... @overload def __init__( self: catch_warnings[list[WarningMessage]], *, record: Literal[True], - module: ModuleType | None = ..., - action: _ActionKind | None = ..., + module: ModuleType | None = None, + action: _ActionKind | None = None, category: type[Warning] = ..., - lineno: int = ..., - append: bool = ..., + lineno: int = 0, + append: bool = False, ) -> None: ... @overload def __init__( self: catch_warnings[list[WarningMessage] | None], *, record: bool, - module: ModuleType | None = ..., - action: _ActionKind | None = ..., + module: ModuleType | None = None, + action: _ActionKind | None = None, category: type[Warning] = ..., - lineno: int = ..., - append: bool = ..., + lineno: int = 0, + append: bool = False, ) -> None: ... else: @overload - def __init__(self: catch_warnings[None], *, record: Literal[False] = ..., module: ModuleType | None = ...) -> None: ... + def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: ... @overload def __init__( - self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = ... + self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None ) -> None: ... @overload def __init__( - self: catch_warnings[list[WarningMessage] | None], *, record: bool, module: ModuleType | None = ... + self: catch_warnings[list[WarningMessage] | None], *, record: bool, module: ModuleType | None = None ) -> None: ... def __enter__(self) -> _W: ... diff --git a/mypy/typeshed/stdlib/wave.pyi b/mypy/typeshed/stdlib/wave.pyi index 853a26a9469e..3817ae09307f 100644 --- a/mypy/typeshed/stdlib/wave.pyi +++ b/mypy/typeshed/stdlib/wave.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer, Self, Unused from typing import IO, Any, BinaryIO, NamedTuple, NoReturn, overload from typing_extensions import Literal, TypeAlias @@ -25,7 +25,7 @@ class _wave_params(NamedTuple): class Wave_read: def __init__(self, f: _File) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... 
+ def __exit__(self, *args: Unused) -> None: ... def getfp(self) -> BinaryIO | None: ... def rewind(self) -> None: ... def close(self) -> None: ... @@ -45,7 +45,7 @@ class Wave_read: class Wave_write: def __init__(self, f: _File) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... @@ -72,7 +72,7 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Wave_read: ... @overload def open(f: _File, mode: Literal["w", "wb"]) -> Wave_write: ... @overload -def open(f: _File, mode: str | None = ...) -> Any: ... +def open(f: _File, mode: str | None = None) -> Any: ... if sys.version_info < (3, 9): openfp = open diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index 9a619235e689..a0f35b4f51eb 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -41,7 +41,7 @@ _P = ParamSpec("_P") ProxyTypes: tuple[type[Any], ...] class WeakMethod(ref[_CallableT], Generic[_CallableT]): - def __new__(cls: type[Self], meth: _CallableT, callback: Callable[[_CallableT], object] | None = ...) -> Self: ... + def __new__(cls: type[Self], meth: _CallableT, callback: Callable[[_CallableT], object] | None = None) -> Self: ... def __call__(self) -> _CallableT | None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... @@ -70,7 +70,7 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ... - def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + def setdefault(self, key: _KT, default: _VT) -> _VT: ... # type: ignore[override] @overload def pop(self, key: _KT) -> _VT: ... @overload @@ -92,7 +92,7 @@ class KeyedRef(ref[_T], Generic[_KT, _T]): class WeakKeyDictionary(MutableMapping[_KT, _VT]): @overload - def __init__(self, dict: None = ...) -> None: ... + def __init__(self, dict: None = None) -> None: ... @overload def __init__(self, dict: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... def __len__(self) -> int: ... @@ -109,7 +109,11 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] def keyrefs(self) -> list[ref[_KT]]: ... - def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + # Keep WeakKeyDictionary.setdefault in line with MutableMapping.setdefault, modulo positional-only differences + @overload + def setdefault(self: WeakKeyDictionary[_KT, _VT | None], key: _KT, default: None = None) -> _VT: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: ... @overload @@ -125,7 +129,7 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): class finalize: # TODO: This is a good candidate for to be a `Generic[_P, _T]` class def __init__(self, __obj: object, __func: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... - def __call__(self, _: Any = ...) -> Any | None: ... + def __call__(self, _: Any = None) -> Any | None: ... def detach(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... 
def peek(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... @property diff --git a/mypy/typeshed/stdlib/webbrowser.pyi b/mypy/typeshed/stdlib/webbrowser.pyi index 8cf8935ffaad..d15ae49fd1e8 100644 --- a/mypy/typeshed/stdlib/webbrowser.pyi +++ b/mypy/typeshed/stdlib/webbrowser.pyi @@ -8,10 +8,10 @@ __all__ = ["Error", "open", "open_new", "open_new_tab", "get", "register"] class Error(Exception): ... def register( - name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = ..., *, preferred: bool = ... + name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = None, *, preferred: bool = False ) -> None: ... -def get(using: str | None = ...) -> BaseBrowser: ... -def open(url: str, new: int = ..., autoraise: bool = ...) -> bool: ... +def get(using: str | None = None) -> BaseBrowser: ... +def open(url: str, new: int = 0, autoraise: bool = True) -> bool: ... def open_new(url: str) -> bool: ... def open_new_tab(url: str) -> bool: ... @@ -19,20 +19,20 @@ class BaseBrowser: args: list[str] name: str basename: str - def __init__(self, name: str = ...) -> None: ... + def __init__(self, name: str = "") -> None: ... @abstractmethod - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... def open_new(self, url: str) -> bool: ... def open_new_tab(self, url: str) -> bool: ... class GenericBrowser(BaseBrowser): def __init__(self, name: str | Sequence[str]) -> None: ... - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class BackgroundBrowser(GenericBrowser): ... class UnixBrowser(BaseBrowser): - def open(self, url: str, new: Literal[0, 1, 2] = ..., autoraise: bool = ...) -> bool: ... # type: ignore[override] + def open(self, url: str, new: Literal[0, 1, 2] = 0, autoraise: bool = True) -> bool: ... # type: ignore[override] raise_opts: list[str] | None background: bool redirect_stdout: bool @@ -51,18 +51,18 @@ class Opera(UnixBrowser): ... class Elinks(UnixBrowser): ... class Konqueror(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class Grail(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... if sys.platform == "win32": class WindowsDefault(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... if sys.platform == "darwin": class MacOSX(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class MacOSXOSAScript(BaseBrowser): # In runtime this class does not have `name` and `basename` - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/mypy/typeshed/stdlib/winreg.pyi b/mypy/typeshed/stdlib/winreg.pyi index 2cc42318f1a4..6377492babc7 100644 --- a/mypy/typeshed/stdlib/winreg.pyi +++ b/mypy/typeshed/stdlib/winreg.pyi @@ -9,17 +9,17 @@ if sys.platform == "win32": def CloseKey(__hkey: _KeyType) -> None: ... 
def ConnectRegistry(__computer_name: str | None, __key: _KeyType) -> HKEYType: ... def CreateKey(__key: _KeyType, __sub_key: str | None) -> HKEYType: ... - def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = ..., access: int = ...) -> HKEYType: ... + def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: ... def DeleteKey(__key: _KeyType, __sub_key: str) -> None: ... - def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = ..., reserved: int = ...) -> None: ... + def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: ... def DeleteValue(__key: _KeyType, __value: str) -> None: ... def EnumKey(__key: _KeyType, __index: int) -> str: ... def EnumValue(__key: _KeyType, __index: int) -> tuple[str, Any, int]: ... def ExpandEnvironmentStrings(__str: str) -> str: ... def FlushKey(__key: _KeyType) -> None: ... def LoadKey(__key: _KeyType, __sub_key: str, __file_name: str) -> None: ... - def OpenKey(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... - def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... + def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... def QueryInfoKey(__key: _KeyType) -> tuple[int, int, int]: ... def QueryValue(__key: _KeyType, __sub_key: str | None) -> str: ... def QueryValueEx(__key: _KeyType, __name: str) -> tuple[Any, int]: ... diff --git a/mypy/typeshed/stdlib/winsound.pyi b/mypy/typeshed/stdlib/winsound.pyi index fd5a552cf9c1..9b2b57a38986 100644 --- a/mypy/typeshed/stdlib/winsound.pyi +++ b/mypy/typeshed/stdlib/winsound.pyi @@ -25,4 +25,4 @@ if sys.platform == "win32": def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... @overload def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... - def MessageBeep(type: int = ...) -> None: ... + def MessageBeep(type: int = 0) -> None: ... diff --git a/mypy/typeshed/stdlib/wsgiref/handlers.pyi b/mypy/typeshed/stdlib/wsgiref/handlers.pyi index 655fba668598..ebead540018e 100644 --- a/mypy/typeshed/stdlib/wsgiref/handlers.pyi +++ b/mypy/typeshed/stdlib/wsgiref/handlers.pyi @@ -38,7 +38,7 @@ class BaseHandler: def set_content_length(self) -> None: ... def cleanup_headers(self) -> None: ... def start_response( - self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ... + self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = None ) -> Callable[[bytes], None]: ... def send_preamble(self) -> None: ... def write(self, data: bytes) -> None: ... @@ -73,8 +73,8 @@ class SimpleHandler(BaseHandler): stdout: IO[bytes], stderr: ErrorStream, environ: MutableMapping[str, str], - multithread: bool = ..., - multiprocess: bool = ..., + multithread: bool = True, + multiprocess: bool = False, ) -> None: ... def get_stdin(self) -> InputStream: ... def get_stderr(self) -> ErrorStream: ... diff --git a/mypy/typeshed/stdlib/wsgiref/headers.pyi b/mypy/typeshed/stdlib/wsgiref/headers.pyi index dd963d9b4727..2654d79bf4e5 100644 --- a/mypy/typeshed/stdlib/wsgiref/headers.pyi +++ b/mypy/typeshed/stdlib/wsgiref/headers.pyi @@ -7,7 +7,7 @@ _HeaderList: TypeAlias = list[tuple[str, str]] tspecials: Pattern[str] # undocumented class Headers: - def __init__(self, headers: _HeaderList | None = ...) -> None: ... 
+ def __init__(self, headers: _HeaderList | None = None) -> None: ... def __len__(self) -> int: ... def __setitem__(self, name: str, val: str) -> None: ... def __delitem__(self, name: str) -> None: ... @@ -17,7 +17,7 @@ class Headers: @overload def get(self, name: str, default: str) -> str: ... @overload - def get(self, name: str, default: str | None = ...) -> str | None: ... + def get(self, name: str, default: str | None = None) -> str | None: ... def keys(self) -> list[str]: ... def values(self) -> list[str]: ... def items(self) -> _HeaderList: ... diff --git a/mypy/typeshed/stdlib/wsgiref/util.pyi b/mypy/typeshed/stdlib/wsgiref/util.pyi index 36e5c1e69676..962fac2c5a22 100644 --- a/mypy/typeshed/stdlib/wsgiref/util.pyi +++ b/mypy/typeshed/stdlib/wsgiref/util.pyi @@ -9,7 +9,7 @@ class FileWrapper: filelike: IO[bytes] blksize: int close: Callable[[], None] # only exists if filelike.close exists - def __init__(self, filelike: IO[bytes], blksize: int = ...) -> None: ... + def __init__(self, filelike: IO[bytes], blksize: int = 8192) -> None: ... if sys.version_info < (3, 11): def __getitem__(self, key: Any) -> bytes: ... @@ -18,7 +18,7 @@ class FileWrapper: def guess_scheme(environ: WSGIEnvironment) -> str: ... def application_uri(environ: WSGIEnvironment) -> str: ... -def request_uri(environ: WSGIEnvironment, include_query: bool = ...) -> str: ... +def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: ... def shift_path_info(environ: WSGIEnvironment) -> str | None: ... def setup_testing_defaults(environ: WSGIEnvironment) -> None: ... def is_hop_by_hop(header_name: str) -> bool: ... diff --git a/mypy/typeshed/stdlib/xml/dom/domreg.pyi b/mypy/typeshed/stdlib/xml/dom/domreg.pyi index 5a276ae5f561..a46d3ff090e6 100644 --- a/mypy/typeshed/stdlib/xml/dom/domreg.pyi +++ b/mypy/typeshed/stdlib/xml/dom/domreg.pyi @@ -5,4 +5,6 @@ well_known_implementations: dict[str, str] registered: dict[str, Callable[[], DOMImplementation]] def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... -def getDOMImplementation(name: str | None = ..., features: str | Iterable[tuple[str, str | None]] = ...) -> DOMImplementation: ... +def getDOMImplementation( + name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ... +) -> DOMImplementation: ... diff --git a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi index e460d6b21afa..45f0af7aa979 100644 --- a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi +++ b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -14,7 +14,7 @@ theDOMImplementation: DOMImplementation | None class ElementInfo: tagName: Incomplete - def __init__(self, tagName, model: Incomplete | None = ...) -> None: ... + def __init__(self, tagName, model: Incomplete | None = None) -> None: ... def getAttributeType(self, aname) -> TypeInfo: ... def getAttributeTypeNS(self, namespaceURI, localName) -> TypeInfo: ... def isElementContent(self) -> bool: ... @@ -25,7 +25,7 @@ class ElementInfo: class ExpatBuilder: document: Document # Created in self.reset() curNode: Incomplete # Created in self.reset() - def __init__(self, options: Options | None = ...) -> None: ... + def __init__(self, options: Options | None = None) -> None: ... def createParser(self): ... def getParser(self): ... def reset(self) -> None: ... 
@@ -71,7 +71,7 @@ class FragmentBuilder(ExpatBuilder): fragment: Incomplete | None originalDocument: Incomplete context: Incomplete - def __init__(self, context, options: Options | None = ...) -> None: ... + def __init__(self, context, options: Options | None = None) -> None: ... class Namespaces: def createParser(self): ... @@ -93,8 +93,8 @@ class InternalSubsetExtractor(ExpatBuilder): def end_doctype_decl_handler(self) -> NoReturn: ... def start_element_handler(self, name, attrs) -> NoReturn: ... -def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = ...): ... -def parseString(string: str | ReadableBuffer, namespaces: bool = ...): ... -def parseFragment(file, context, namespaces: bool = ...): ... -def parseFragmentString(string: str, context, namespaces: bool = ...): ... +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True): ... +def parseString(string: str | ReadableBuffer, namespaces: bool = True): ... +def parseFragment(file, context, namespaces: bool = True): ... +def parseFragmentString(string: str, context, namespaces: bool = True): ... def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/mypy/typeshed/stdlib/xml/dom/minidom.pyi b/mypy/typeshed/stdlib/xml/dom/minidom.pyi index 5997e031fd73..d996f66984f9 100644 --- a/mypy/typeshed/stdlib/xml/dom/minidom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/minidom.pyi @@ -5,9 +5,9 @@ from typing_extensions import Literal from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS from xml.sax.xmlreader import XMLReader -def parse(file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = ..., bufsize: int | None = ...): ... -def parseString(string: str | ReadableBuffer, parser: XMLReader | None = ...): ... -def getDOMImplementation(features=...) -> DOMImplementation | None: ... +def parse(file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None): ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None): ... +def getDOMImplementation(features=None) -> DOMImplementation | None: ... class Node(xml.dom.Node): namespaceURI: str | None @@ -24,11 +24,13 @@ class Node(xml.dom.Node): def localName(self) -> str | None: ... def __bool__(self) -> Literal[True]: ... if sys.version_info >= (3, 9): - def toxml(self, encoding: str | None = ..., standalone: bool | None = ...): ... - def toprettyxml(self, indent: str = ..., newl: str = ..., encoding: str | None = ..., standalone: bool | None = ...): ... + def toxml(self, encoding: str | None = None, standalone: bool | None = None): ... + def toprettyxml( + self, indent: str = "\t", newl: str = "\n", encoding: str | None = None, standalone: bool | None = None + ): ... else: - def toxml(self, encoding: str | None = ...): ... - def toprettyxml(self, indent: str = ..., newl: str = ..., encoding: str | None = ...): ... + def toxml(self, encoding: str | None = None): ... + def toprettyxml(self, indent: str = "\t", newl: str = "\n", encoding: str | None = None): ... def hasChildNodes(self) -> bool: ... def insertBefore(self, newChild, refChild): ... @@ -69,7 +71,7 @@ class Attr(Node): value: str prefix: Incomplete def __init__( - self, qName: str, namespaceURI: str | None = ..., localName: str | None = ..., prefix: Incomplete | None = ... + self, qName: str, namespaceURI: str | None = None, localName: str | None = None, prefix: Incomplete | None = None ) -> None: ... def unlink(self) -> None: ... 
@property @@ -86,7 +88,7 @@ class NamedNodeMap: def keys(self): ... def keysNS(self): ... def values(self): ... - def get(self, name: str, value: Incomplete | None = ...): ... + def get(self, name: str, value: Incomplete | None = None): ... def __len__(self) -> int: ... def __eq__(self, other: object) -> bool: ... def __ge__(self, other: NamedNodeMap) -> bool: ... @@ -124,7 +126,7 @@ class Element(Node): childNodes: Incomplete nextSibling: Incomplete def __init__( - self, tagName, namespaceURI: str | None = ..., prefix: Incomplete | None = ..., localName: Incomplete | None = ... + self, tagName, namespaceURI: str | None = None, prefix: Incomplete | None = None, localName: Incomplete | None = None ) -> None: ... def unlink(self) -> None: ... def getAttribute(self, attname: str) -> str: ... @@ -143,7 +145,7 @@ class Element(Node): def hasAttributeNS(self, namespaceURI: str, localName) -> bool: ... def getElementsByTagName(self, name: str): ... def getElementsByTagNameNS(self, namespaceURI: str, localName): ... - def writexml(self, writer: SupportsWrite[str], indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def hasAttributes(self) -> bool: ... def setIdAttribute(self, name) -> None: ... def setIdAttributeNS(self, namespaceURI: str, localName) -> None: ... @@ -170,7 +172,7 @@ class ProcessingInstruction(Childless, Node): def __init__(self, target, data) -> None: ... nodeValue: Incomplete nodeName: Incomplete - def writexml(self, writer: SupportsWrite[str], indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class CharacterData(Childless, Node): ownerDocument: Incomplete @@ -193,7 +195,7 @@ class Text(CharacterData): attributes: Incomplete data: Incomplete def splitText(self, offset): ... - def writexml(self, writer: SupportsWrite[str], indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def replaceWholeText(self, content): ... @property def isWhitespaceInElementContent(self) -> bool: ... @@ -204,12 +206,12 @@ class Comment(CharacterData): nodeType: int nodeName: str def __init__(self, data) -> None: ... - def writexml(self, writer: SupportsWrite[str], indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class CDATASection(Text): nodeType: int nodeName: str - def writexml(self, writer: SupportsWrite[str], indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class ReadOnlySequentialNamedNodeMap: def __init__(self, seq=...) -> None: ... @@ -239,7 +241,7 @@ class DocumentType(Identified, Childless, Node): nodeName: Incomplete def __init__(self, qualifiedName: str) -> None: ... def cloneNode(self, deep): ... - def writexml(self, writer: SupportsWrite[str], indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... 
class Entity(Identified, Node): attributes: Incomplete @@ -321,20 +323,20 @@ class Document(Node, DocumentLS): def writexml( self, writer: SupportsWrite[str], - indent: str = ..., - addindent: str = ..., - newl: str = ..., - encoding: str | None = ..., - standalone: bool | None = ..., + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: str | None = None, + standalone: bool | None = None, ) -> None: ... else: def writexml( self, writer: SupportsWrite[str], - indent: str = ..., - addindent: str = ..., - newl: str = ..., - encoding: Incomplete | None = ..., + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: Incomplete | None = None, ) -> None: ... def renameNode(self, n, namespaceURI: str, name): ... diff --git a/mypy/typeshed/stdlib/xml/dom/pulldom.pyi b/mypy/typeshed/stdlib/xml/dom/pulldom.pyi index b4c03a1dd590..920905160e43 100644 --- a/mypy/typeshed/stdlib/xml/dom/pulldom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/pulldom.pyi @@ -39,7 +39,7 @@ class PullDOM(ContentHandler): lastEvent: Incomplete elementStack: Sequence[Incomplete] pending_events: Sequence[Incomplete] - def __init__(self, documentFactory: _DocumentFactory = ...) -> None: ... + def __init__(self, documentFactory: _DocumentFactory = None) -> None: ... def pop(self) -> Element: ... def setDocumentLocator(self, locator) -> None: ... def startPrefixMapping(self, prefix, uri) -> None: ... @@ -88,6 +88,6 @@ class SAX2DOM(PullDOM): default_bufsize: int def parse( - stream_or_string: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = ..., bufsize: int | None = ... + stream_or_string: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = None, bufsize: int | None = None ) -> DOMEventStream: ... -def parseString(string: str, parser: XMLReader | None = ...) -> DOMEventStream: ... +def parseString(string: str, parser: XMLReader | None = None) -> DOMEventStream: ... diff --git a/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi b/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi index 341d717e043b..c07e4ba2465e 100644 --- a/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi +++ b/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, Unused from typing import Any, NoReturn from typing_extensions import Literal, TypeAlias from urllib.request import OpenerDirector @@ -66,7 +66,7 @@ class DOMBuilder: # `input` and `cnode` argtypes for `parseWithContext` are unknowable # as the function does nothing with them, and always raises an exception. # But `input` is *probably* `DOMInputSource`? - def parseWithContext(self, input: object, cnode: object, action: Literal[1, 2, 3, 4]) -> NoReturn: ... + def parseWithContext(self, input: Unused, cnode: Unused, action: Literal[1, 2, 3, 4]) -> NoReturn: ... class DOMEntityResolver: def resolveEntity(self, publicId: str | None, systemId: str) -> DOMInputSource: ... @@ -86,9 +86,8 @@ class DOMBuilderFilter: FILTER_SKIP: Literal[3] FILTER_INTERRUPT: Literal[4] whatToShow: int - # The argtypes for acceptNode and startContainer appear to be irrelevant. - def acceptNode(self, element: object) -> Literal[1]: ... - def startContainer(self, element: object) -> Literal[1]: ... + def acceptNode(self, element: Unused) -> Literal[1]: ... + def startContainer(self, element: Unused) -> Literal[1]: ... class DocumentLS: async_: bool @@ -97,8 +96,8 @@ class DocumentLS: # so the argtypes of `uri` and `source` are unknowable. # `source` is *probably* `DOMInputSource`? 
# `uri` is *probably* a str? (see DOMBuilder.parseURI()) - def load(self, uri: object) -> NoReturn: ... - def loadXML(self, source: object) -> NoReturn: ... + def load(self, uri: Unused) -> NoReturn: ... + def loadXML(self, source: Unused) -> NoReturn: ... def saveXML(self, snode: Node | None) -> str: ... class DOMImplementationLS: diff --git a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi index 43b394bd67ec..cbba15dd3ebe 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi @@ -12,17 +12,17 @@ if sys.version_info >= (3, 9): class FatalIncludeError(SyntaxError): ... -def default_loader(href: FileDescriptorOrPath, parse: str, encoding: str | None = ...) -> str | Element: ... +def default_loader(href: FileDescriptorOrPath, parse: str, encoding: str | None = None) -> str | Element: ... # TODO: loader is of type default_loader ie it takes a callable that has the # same signature as default_loader. But default_loader has a keyword argument # Which can't be represented using Callable... if sys.version_info >= (3, 9): def include( - elem: Element, loader: Callable[..., str | Element] | None = ..., base_url: str | None = ..., max_depth: int | None = ... + elem: Element, loader: Callable[..., str | Element] | None = None, base_url: str | None = None, max_depth: int | None = 6 ) -> None: ... class LimitedRecursiveIncludeError(FatalIncludeError): ... else: - def include(elem: Element, loader: Callable[..., str | Element] | None = ...) -> None: ... + def include(elem: Element, loader: Callable[..., str | Element] | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi b/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi index 94ce933582dd..c3f6207ea241 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi @@ -10,7 +10,7 @@ _Token: TypeAlias = tuple[str, str] _Next: TypeAlias = Callable[[], _Token] _Callback: TypeAlias = Callable[[_SelectorContext, list[Element]], Generator[Element, None, None]] -def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = ...) -> Generator[_Token, None, None]: ... +def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = None) -> Generator[_Token, None, None]: ... def get_parent_map(context: _SelectorContext) -> dict[Element, Element]: ... def prepare_child(next: _Next, token: _Token) -> _Callback: ... def prepare_star(next: _Next, token: _Token) -> _Callback: ... @@ -28,7 +28,7 @@ class _SelectorContext: _T = TypeVar("_T") -def iterfind(elem: Element, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... -def find(elem: Element, path: str, namespaces: dict[str, str] | None = ...) -> Element | None: ... -def findall(elem: Element, path: str, namespaces: dict[str, str] | None = ...) -> list[Element]: ... -def findtext(elem: Element, path: str, default: _T | None = ..., namespaces: dict[str, str] | None = ...) -> _T | str: ... +def iterfind(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... +def find(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... +def findall(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... +def findtext(elem: Element, path: str, default: _T | None = None, namespaces: dict[str, str] | None = None) -> _T | str: ... 
diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi index 2b6191a395c3..db33b2d673d7 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -54,10 +54,10 @@ def iselement(element: object) -> TypeGuard[Element]: ... if sys.version_info >= (3, 8): @overload def canonicalize( - xml_data: str | ReadableBuffer | None = ..., + xml_data: str | ReadableBuffer | None = None, *, - out: None = ..., - from_file: _FileRead | None = ..., + out: None = None, + from_file: _FileRead | None = None, with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., @@ -68,10 +68,10 @@ if sys.version_info >= (3, 8): ) -> str: ... @overload def canonicalize( - xml_data: str | ReadableBuffer | None = ..., + xml_data: str | ReadableBuffer | None = None, *, out: SupportsWrite[str], - from_file: _FileRead | None = ..., + from_file: _FileRead | None = None, with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., @@ -90,20 +90,20 @@ class Element: def append(self, __subelement: Element) -> None: ... def clear(self) -> None: ... def extend(self, __elements: Iterable[Element]) -> None: ... - def find(self, path: str, namespaces: dict[str, str] | None = ...) -> Element | None: ... - def findall(self, path: str, namespaces: dict[str, str] | None = ...) -> list[Element]: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... @overload - def findtext(self, path: str, default: None = ..., namespaces: dict[str, str] | None = ...) -> str | None: ... + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... @overload - def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = ...) -> _T | str: ... + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... @overload - def get(self, key: str, default: None = ...) -> str | None: ... + def get(self, key: str, default: None = None) -> str | None: ... @overload def get(self, key: str, default: _T) -> str | _T: ... def insert(self, __index: int, __subelement: Element) -> None: ... def items(self) -> ItemsView[str, str]: ... - def iter(self, tag: str | None = ...) -> Generator[Element, None, None]: ... - def iterfind(self, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def itertext(self) -> Generator[str, None, None]: ... def keys(self) -> dict_keys[str, str]: ... # makeelement returns the type of self in Python impl, but not in C impl @@ -126,17 +126,17 @@ class Element: def __setitem__(self, __s: slice, __o: Iterable[Element]) -> None: ... if sys.version_info < (3, 9): def getchildren(self) -> list[Element]: ... - def getiterator(self, tag: str | None = ...) -> list[Element]: ... + def getiterator(self, tag: str | None = None) -> list[Element]: ... def SubElement(parent: Element, tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... -def Comment(text: str | None = ...) -> Element: ... -def ProcessingInstruction(target: str, text: str | None = ...) -> Element: ... +def Comment(text: str | None = None) -> Element: ... 
+def ProcessingInstruction(target: str, text: str | None = None) -> Element: ... PI: Callable[..., Element] class QName: text: str - def __init__(self, text_or_uri: str, tag: str | None = ...) -> None: ... + def __init__(self, text_or_uri: str, tag: str | None = None) -> None: ... def __lt__(self, other: QName | str) -> bool: ... def __le__(self, other: QName | str) -> bool: ... def __gt__(self, other: QName | str) -> bool: ... @@ -144,29 +144,29 @@ class QName: def __eq__(self, other: object) -> bool: ... class ElementTree: - def __init__(self, element: Element | None = ..., file: _FileRead | None = ...) -> None: ... + def __init__(self, element: Element | None = None, file: _FileRead | None = None) -> None: ... def getroot(self) -> Element | Any: ... - def parse(self, source: _FileRead, parser: XMLParser | None = ...) -> Element: ... - def iter(self, tag: str | None = ...) -> Generator[Element, None, None]: ... + def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... if sys.version_info < (3, 9): - def getiterator(self, tag: str | None = ...) -> list[Element]: ... + def getiterator(self, tag: str | None = None) -> list[Element]: ... - def find(self, path: str, namespaces: dict[str, str] | None = ...) -> Element | None: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... @overload - def findtext(self, path: str, default: None = ..., namespaces: dict[str, str] | None = ...) -> str | None: ... + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... @overload - def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = ...) -> _T | str: ... - def findall(self, path: str, namespaces: dict[str, str] | None = ...) -> list[Element]: ... - def iterfind(self, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def write( self, file_or_filename: _FileWrite, - encoding: str | None = ..., - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - method: str | None = ..., + encoding: str | None = None, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + method: str | None = None, *, - short_empty_elements: bool = ..., + short_empty_elements: bool = True, ) -> None: ... def write_c14n(self, file: _FileWriteC14N) -> None: ... @@ -176,113 +176,113 @@ if sys.version_info >= (3, 8): @overload def tostring( element: Element, - encoding: None = ..., - method: str | None = ..., + encoding: None = None, + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> bytes: ... 
@overload def tostring( element: Element, encoding: Literal["unicode"], - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> str: ... @overload def tostring( element: Element, encoding: str, - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> Any: ... @overload def tostringlist( element: Element, - encoding: None = ..., - method: str | None = ..., + encoding: None = None, + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> list[bytes]: ... @overload def tostringlist( element: Element, encoding: Literal["unicode"], - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> list[str]: ... @overload def tostringlist( element: Element, encoding: str, - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> list[Any]: ... else: @overload def tostring( - element: Element, encoding: None = ..., method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: None = None, method: str | None = None, *, short_empty_elements: bool = True ) -> bytes: ... @overload def tostring( - element: Element, encoding: Literal["unicode"], method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: Literal["unicode"], method: str | None = None, *, short_empty_elements: bool = True ) -> str: ... @overload - def tostring(element: Element, encoding: str, method: str | None = ..., *, short_empty_elements: bool = ...) -> Any: ... + def tostring(element: Element, encoding: str, method: str | None = None, *, short_empty_elements: bool = True) -> Any: ... @overload def tostringlist( - element: Element, encoding: None = ..., method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: None = None, method: str | None = None, *, short_empty_elements: bool = True ) -> list[bytes]: ... @overload def tostringlist( - element: Element, encoding: Literal["unicode"], method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: Literal["unicode"], method: str | None = None, *, short_empty_elements: bool = True ) -> list[str]: ... @overload def tostringlist( - element: Element, encoding: str, method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: str, method: str | None = None, *, short_empty_elements: bool = True ) -> list[Any]: ... def dump(elem: Element) -> None: ... 
if sys.version_info >= (3, 9): - def indent(tree: Element | ElementTree, space: str = ..., level: int = ...) -> None: ... + def indent(tree: Element | ElementTree, space: str = " ", level: int = 0) -> None: ... -def parse(source: _FileRead, parser: XMLParser | None = ...) -> ElementTree: ... +def parse(source: _FileRead, parser: XMLParser | None = None) -> ElementTree: ... def iterparse( - source: _FileRead, events: Sequence[str] | None = ..., parser: XMLParser | None = ... + source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None ) -> Iterator[tuple[str, Any]]: ... class XMLPullParser: - def __init__(self, events: Sequence[str] | None = ..., *, _parser: XMLParser | None = ...) -> None: ... + def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser | None = None) -> None: ... def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... # Second element in the tuple could be `Element`, `tuple[str, str]` or `None`. # Use `Any` to avoid false-positive errors. def read_events(self) -> Iterator[tuple[str, Any]]: ... -def XML(text: str | ReadableBuffer, parser: XMLParser | None = ...) -> Element: ... -def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... +def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: ... +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: ... # This is aliased to XML in the source. fromstring = XML -def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = ...) -> Element: ... +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: ... # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce @@ -321,7 +321,7 @@ class TreeBuilder: if sys.version_info >= (3, 8): # These two methods have pos-only parameters in the C implementation def comment(self, __text: str | None) -> Element: ... - def pi(self, __target: str, __text: str | None = ...) -> Element: ... + def pi(self, __target: str, __text: str | None = None) -> Element: ... if sys.version_info >= (3, 8): class C14NWriterTarget: @@ -329,13 +329,13 @@ if sys.version_info >= (3, 8): self, write: Callable[[str], object], *, - with_comments: bool = ..., - strip_text: bool = ..., - rewrite_prefixes: bool = ..., - qname_aware_tags: Iterable[str] | None = ..., - qname_aware_attrs: Iterable[str] | None = ..., - exclude_attrs: Iterable[str] | None = ..., - exclude_tags: Iterable[str] | None = ..., + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, ) -> None: ... def data(self, data: str) -> None: ... def start_ns(self, prefix: str, uri: str) -> None: ... diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi index b8ab4d439e74..a591258db801 100644 --- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -9,7 +9,7 @@ class _SupportsReadClose(SupportsRead[_T_co], Protocol[_T_co]): def close(self) -> None: ... class SAXException(Exception): - def __init__(self, msg: str, exception: Exception | None = ...) -> None: ... 
+ def __init__(self, msg: str, exception: Exception | None = None) -> None: ... def getMessage(self) -> str: ... def getException(self) -> Exception: ... def __getitem__(self, ix: Any) -> NoReturn: ... diff --git a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi index 1361949d0c3e..67a06d2fcda2 100644 --- a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi +++ b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi @@ -11,9 +11,9 @@ def quoteattr(data: str, entities: Mapping[str, str] = ...) -> str: ... class XMLGenerator(handler.ContentHandler): def __init__( self, - out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[str] | None = ..., - encoding: str = ..., - short_empty_elements: bool = ..., + out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[str] | None = None, + encoding: str = "iso-8859-1", + short_empty_elements: bool = False, ) -> None: ... def startDocument(self): ... def endDocument(self): ... @@ -28,7 +28,7 @@ class XMLGenerator(handler.ContentHandler): def processingInstruction(self, target, data): ... class XMLFilterBase(xmlreader.XMLReader): - def __init__(self, parent: xmlreader.XMLReader | None = ...) -> None: ... + def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... def error(self, exception): ... def fatalError(self, exception): ... def warning(self, exception): ... @@ -57,4 +57,4 @@ class XMLFilterBase(xmlreader.XMLReader): def getParent(self): ... def setParent(self, parent): ... -def prepare_input_source(source, base=...): ... +def prepare_input_source(source, base=""): ... diff --git a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi index 4480f4098635..0bf167b04a37 100644 --- a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -17,7 +17,7 @@ class XMLReader: def setProperty(self, name, value): ... class IncrementalParser(XMLReader): - def __init__(self, bufsize: int = ...) -> None: ... + def __init__(self, bufsize: int = 65536) -> None: ... def parse(self, source): ... def feed(self, data): ... def prepareParser(self, source): ... @@ -31,7 +31,7 @@ class Locator: def getSystemId(self): ... class InputSource: - def __init__(self, system_id: str | None = ...) -> None: ... + def __init__(self, system_id: str | None = None) -> None: ... def setPublicId(self, public_id): ... def getPublicId(self): ... def setSystemId(self, system_id): ... @@ -57,7 +57,7 @@ class AttributesImpl: def __getitem__(self, name): ... def keys(self): ... def __contains__(self, name): ... - def get(self, name, alternative=...): ... + def get(self, name, alternative=None): ... def copy(self): ... def items(self): ... def values(self): ... diff --git a/mypy/typeshed/stdlib/xmlrpc/client.pyi b/mypy/typeshed/stdlib/xmlrpc/client.pyi index 0e048f57844d..536cd6382d0b 100644 --- a/mypy/typeshed/stdlib/xmlrpc/client.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/client.pyi @@ -55,7 +55,6 @@ INTERNAL_ERROR: int # undocumented class Error(Exception): ... class ProtocolError(Error): - url: str errcode: int errmsg: str @@ -65,7 +64,6 @@ class ProtocolError(Error): class ResponseError(Error): ... class Fault(Error): - faultCode: int faultString: str def __init__(self, faultCode: int, faultString: str, **extra: Any) -> None: ... @@ -77,9 +75,8 @@ def _iso8601_format(value: datetime) -> str: ... # undocumented def _strftime(value: _XMLDate) -> str: ... 
# undocumented class DateTime: - value: str # undocumented - def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = ...) -> None: ... + def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = 0) -> None: ... def __lt__(self, other: _DateTimeComparable) -> bool: ... def __le__(self, other: _DateTimeComparable) -> bool: ... def __gt__(self, other: _DateTimeComparable) -> bool: ... @@ -94,9 +91,8 @@ def _datetime(data: Any) -> DateTime: ... # undocumented def _datetime_type(data: str) -> datetime: ... # undocumented class Binary: - data: bytes - def __init__(self, data: bytes | bytearray | None = ...) -> None: ... + def __init__(self, data: bytes | bytearray | None = None) -> None: ... def decode(self, data: ReadableBuffer) -> None: ... def encode(self, out: SupportsWrite[str]) -> None: ... def __eq__(self, other: object) -> bool: ... @@ -119,7 +115,7 @@ class Marshaller: data: None encoding: str | None allow_none: bool - def __init__(self, encoding: str | None = ..., allow_none: bool = ...) -> None: ... + def __init__(self, encoding: str | None = None, allow_none: bool = False) -> None: ... def dumps(self, values: Fault | Iterable[_Marshallable]) -> str: ... def __dump(self, value: _Marshallable, write: _WriteCallback) -> None: ... # undocumented def dump_nil(self, value: None, write: _WriteCallback) -> None: ... @@ -137,7 +133,6 @@ class Marshaller: def dump_instance(self, value: object, write: _WriteCallback) -> None: ... class Unmarshaller: - dispatch: dict[str, Callable[[Unmarshaller, str], None]] _type: str | None @@ -150,7 +145,7 @@ class Unmarshaller: append: Callable[[Any], None] _use_datetime: bool _use_builtin_types: bool - def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... + def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... def close(self) -> tuple[_Marshallable, ...]: ... def getmethodname(self) -> str | None: ... def xml(self, encoding: str, standalone: Any) -> None: ... # Standalone is ignored @@ -174,7 +169,6 @@ class Unmarshaller: def end_methodName(self, data: str) -> None: ... class _MultiCallMethod: # undocumented - __call_list: list[tuple[str, tuple[_Marshallable, ...]]] __name: str def __init__(self, call_list: list[tuple[str, _Marshallable]], name: str) -> None: ... @@ -182,13 +176,11 @@ class _MultiCallMethod: # undocumented def __call__(self, *args: _Marshallable) -> None: ... class MultiCallIterator: # undocumented - results: list[list[_Marshallable]] def __init__(self, results: list[list[_Marshallable]]) -> None: ... def __getitem__(self, i: int) -> _Marshallable: ... class MultiCall: - __server: ServerProxy __call_list: list[tuple[str, tuple[_Marshallable, ...]]] def __init__(self, server: ServerProxy) -> None: ... @@ -200,25 +192,25 @@ FastMarshaller: Marshaller | None FastParser: ExpatParser | None FastUnmarshaller: Unmarshaller | None -def getparser(use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[ExpatParser, Unmarshaller]: ... +def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: ... def dumps( params: Fault | tuple[_Marshallable, ...], - methodname: str | None = ..., - methodresponse: bool | None = ..., - encoding: str | None = ..., - allow_none: bool = ..., + methodname: str | None = None, + methodresponse: bool | None = None, + encoding: str | None = None, + allow_none: bool = False, ) -> str: ... 
-def loads(data: str, use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[tuple[_Marshallable, ...], str | None]: ... +def loads( + data: str, use_datetime: bool = False, use_builtin_types: bool = False +) -> tuple[tuple[_Marshallable, ...], str | None]: ... def gzip_encode(data: ReadableBuffer) -> bytes: ... # undocumented -def gzip_decode(data: ReadableBuffer, max_decode: int = ...) -> bytes: ... # undocumented +def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: ... # undocumented class GzipDecodedResponse(gzip.GzipFile): # undocumented - io: BytesIO def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... class _Method: # undocumented - __send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable] __name: str def __init__(self, send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable], name: str) -> None: ... @@ -226,7 +218,6 @@ class _Method: # undocumented def __call__(self, *args: _Marshallable) -> _Marshallable: ... class Transport: - user_agent: str accept_gzip_encoding: bool encode_threshold: int | None @@ -239,16 +230,16 @@ class Transport: if sys.version_info >= (3, 8): def __init__( - self, use_datetime: bool = ..., use_builtin_types: bool = ..., *, headers: Iterable[tuple[str, str]] = ... + self, use_datetime: bool = False, use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ... ) -> None: ... else: - def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... + def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... def request( - self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = ... + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = False ) -> tuple[_Marshallable, ...]: ... def single_request( - self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = ... + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = False ) -> tuple[_Marshallable, ...]: ... def getparser(self) -> tuple[ExpatParser, Unmarshaller]: ... def get_host_info(self, host: _HostType) -> tuple[str, list[tuple[str, str]], dict[str, str]]: ... @@ -262,23 +253,23 @@ class Transport: def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... class SafeTransport(Transport): - if sys.version_info >= (3, 8): def __init__( self, - use_datetime: bool = ..., - use_builtin_types: bool = ..., + use_datetime: bool = False, + use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ..., - context: Any | None = ..., + context: Any | None = None, ) -> None: ... else: - def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ..., *, context: Any | None = ...) -> None: ... + def __init__( + self, use_datetime: bool = False, use_builtin_types: bool = False, *, context: Any | None = None + ) -> None: ... def make_connection(self, host: _HostType) -> http.client.HTTPSConnection: ... 
class ServerProxy: - __host: str __handler: str __transport: Transport @@ -290,28 +281,28 @@ class ServerProxy: def __init__( self, uri: str, - transport: Transport | None = ..., - encoding: str | None = ..., - verbose: bool = ..., - allow_none: bool = ..., - use_datetime: bool = ..., - use_builtin_types: bool = ..., + transport: Transport | None = None, + encoding: str | None = None, + verbose: bool = False, + allow_none: bool = False, + use_datetime: bool = False, + use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ..., - context: Any | None = ..., + context: Any | None = None, ) -> None: ... else: def __init__( self, uri: str, - transport: Transport | None = ..., - encoding: str | None = ..., - verbose: bool = ..., - allow_none: bool = ..., - use_datetime: bool = ..., - use_builtin_types: bool = ..., + transport: Transport | None = None, + encoding: str | None = None, + verbose: bool = False, + allow_none: bool = False, + use_datetime: bool = False, + use_builtin_types: bool = False, *, - context: Any | None = ..., + context: Any | None = None, ) -> None: ... def __getattr__(self, name: str) -> _Method: ... diff --git a/mypy/typeshed/stdlib/xmlrpc/server.pyi b/mypy/typeshed/stdlib/xmlrpc/server.pyi index 4d28974cbbed..800c205513c6 100644 --- a/mypy/typeshed/stdlib/xmlrpc/server.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/server.pyi @@ -32,26 +32,25 @@ _DispatchProtocol: TypeAlias = ( _DispatchArity0 | _DispatchArity1 | _DispatchArity2 | _DispatchArity3 | _DispatchArity4 | _DispatchArityN ) -def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = ...) -> Any: ... # undocumented +def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: ... # undocumented def list_public_methods(obj: Any) -> list[str]: ... # undocumented class SimpleXMLRPCDispatcher: # undocumented - funcs: dict[str, _DispatchProtocol] instance: Any | None allow_none: bool encoding: str use_builtin_types: bool - def __init__(self, allow_none: bool = ..., encoding: str | None = ..., use_builtin_types: bool = ...) -> None: ... - def register_instance(self, instance: Any, allow_dotted_names: bool = ...) -> None: ... - def register_function(self, function: _DispatchProtocol | None = ..., name: str | None = ...) -> Callable[..., Any]: ... + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... + def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: ... + def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: ... def register_introspection_functions(self) -> None: ... def register_multicall_functions(self) -> None: ... def _marshaled_dispatch( self, data: str, - dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = ..., - path: Any | None = ..., + dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, + path: Any | None = None, ) -> str: ... # undocumented def system_listMethods(self) -> list[str]: ... # undocumented def system_methodSignature(self, method_name: str) -> str: ... # undocumented @@ -70,56 +69,53 @@ class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): def report_404(self) -> None: ... 
class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): - _send_traceback_handler: bool def __init__( self, addr: tuple[str, int], requestHandler: type[SimpleXMLRPCRequestHandler] = ..., - logRequests: bool = ..., - allow_none: bool = ..., - encoding: str | None = ..., - bind_and_activate: bool = ..., - use_builtin_types: bool = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, ) -> None: ... class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented - dispatchers: dict[str, SimpleXMLRPCDispatcher] def __init__( self, addr: tuple[str, int], requestHandler: type[SimpleXMLRPCRequestHandler] = ..., - logRequests: bool = ..., - allow_none: bool = ..., - encoding: str | None = ..., - bind_and_activate: bool = ..., - use_builtin_types: bool = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, ) -> None: ... def add_dispatcher(self, path: str, dispatcher: SimpleXMLRPCDispatcher) -> SimpleXMLRPCDispatcher: ... def get_dispatcher(self, path: str) -> SimpleXMLRPCDispatcher: ... class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): - def __init__(self, allow_none: bool = ..., encoding: str | None = ..., use_builtin_types: bool = ...) -> None: ... + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... def handle_xmlrpc(self, request_text: str) -> None: ... def handle_get(self) -> None: ... - def handle_request(self, request_text: str | None = ...) -> None: ... + def handle_request(self, request_text: str | None = None) -> None: ... class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented def docroutine( # type: ignore[override] self, object: object, name: str, - mod: str | None = ..., + mod: str | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., - cl: type | None = ..., + cl: type | None = None, ) -> str: ... def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... class XMLRPCDocGenerator: # undocumented - server_name: str server_documentation: str server_title: str @@ -136,11 +132,11 @@ class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): self, addr: tuple[str, int], requestHandler: type[SimpleXMLRPCRequestHandler] = ..., - logRequests: bool = ..., - allow_none: bool = ..., - encoding: str | None = ..., - bind_and_activate: bool = ..., - use_builtin_types: bool = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, ) -> None: ... class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler, XMLRPCDocGenerator): diff --git a/mypy/typeshed/stdlib/zipapp.pyi b/mypy/typeshed/stdlib/zipapp.pyi index 3363161c3c6f..c7cf1704b135 100644 --- a/mypy/typeshed/stdlib/zipapp.pyi +++ b/mypy/typeshed/stdlib/zipapp.pyi @@ -11,10 +11,10 @@ class ZipAppError(ValueError): ... def create_archive( source: _Path, - target: _Path | None = ..., - interpreter: str | None = ..., - main: str | None = ..., - filter: Callable[[Path], bool] | None = ..., - compressed: bool = ..., + target: _Path | None = None, + interpreter: str | None = None, + main: str | None = None, + filter: Callable[[Path], bool] | None = None, + compressed: bool = False, ) -> None: ... def get_interpreter(archive: _Path) -> str: ... 
diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index e964cd6eda87..0cb6138dfddd 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -70,7 +70,7 @@ class ZipExtFile(io.BufferedIOBase): fileobj: _ClosableZipStream, mode: _ReadWriteMode, zipinfo: ZipInfo, - pwd: bytes | None = ..., + pwd: bytes | None = None, *, close_fileobj: Literal[True], ) -> None: ... @@ -80,14 +80,14 @@ class ZipExtFile(io.BufferedIOBase): fileobj: _ZipStream, mode: _ReadWriteMode, zipinfo: ZipInfo, - pwd: bytes | None = ..., - close_fileobj: Literal[False] = ..., + pwd: bytes | None = None, + close_fileobj: Literal[False] = False, ) -> None: ... - def read(self, n: int | None = ...) -> bytes: ... - def readline(self, limit: int = ...) -> bytes: ... # type: ignore[override] - def peek(self, n: int = ...) -> bytes: ... + def read(self, n: int | None = -1) -> bytes: ... + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] + def peek(self, n: int = 1) -> bytes: ... def read1(self, n: int | None) -> bytes: ... # type: ignore[override] - def seek(self, offset: int, whence: int = ...) -> int: ... + def seek(self, offset: int, whence: int = 0) -> int: ... class _Writer(Protocol): def write(self, __s: str) -> object: ... @@ -109,45 +109,45 @@ class ZipFile: def __init__( self, file: StrPath | IO[bytes], - mode: Literal["r"] = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: Literal["r"] = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, *, - strict_timestamps: bool = ..., + strict_timestamps: bool = True, metadata_encoding: str | None, ) -> None: ... @overload def __init__( self, file: StrPath | IO[bytes], - mode: _ZipFileMode = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, *, - strict_timestamps: bool = ..., - metadata_encoding: None = ..., + strict_timestamps: bool = True, + metadata_encoding: None = None, ) -> None: ... elif sys.version_info >= (3, 8): def __init__( self, file: StrPath | IO[bytes], - mode: _ZipFileMode = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, *, - strict_timestamps: bool = ..., + strict_timestamps: bool = True, ) -> None: ... else: def __init__( self, file: StrPath | IO[bytes], - mode: _ZipFileMode = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, ) -> None: ... def __enter__(self: Self) -> Self: ... @@ -159,34 +159,38 @@ class ZipFile: def infolist(self) -> list[ZipInfo]: ... def namelist(self) -> list[str]: ... def open( - self, name: str | ZipInfo, mode: _ReadWriteMode = ..., pwd: bytes | None = ..., *, force_zip64: bool = ... + self, name: str | ZipInfo, mode: _ReadWriteMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False ) -> IO[bytes]: ... - def extract(self, member: str | ZipInfo, path: StrPath | None = ..., pwd: bytes | None = ...) -> str: ... + def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: ... 
def extractall( - self, path: StrPath | None = ..., members: Iterable[str | ZipInfo] | None = ..., pwd: bytes | None = ... + self, path: StrPath | None = None, members: Iterable[str | ZipInfo] | None = None, pwd: bytes | None = None ) -> None: ... - def printdir(self, file: _Writer | None = ...) -> None: ... + def printdir(self, file: _Writer | None = None) -> None: ... def setpassword(self, pwd: bytes) -> None: ... - def read(self, name: str | ZipInfo, pwd: bytes | None = ...) -> bytes: ... + def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: ... def testzip(self) -> str | None: ... def write( - self, filename: StrPath, arcname: StrPath | None = ..., compress_type: int | None = ..., compresslevel: int | None = ... + self, + filename: StrPath, + arcname: StrPath | None = None, + compress_type: int | None = None, + compresslevel: int | None = None, ) -> None: ... def writestr( self, zinfo_or_arcname: str | ZipInfo, data: _BufferWithLen | str, - compress_type: int | None = ..., - compresslevel: int | None = ..., + compress_type: int | None = None, + compresslevel: int | None = None, ) -> None: ... if sys.version_info >= (3, 11): - def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = ...) -> None: ... + def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: ... class PyZipFile(ZipFile): def __init__( - self, file: str | IO[bytes], mode: _ZipFileMode = ..., compression: int = ..., allowZip64: bool = ..., optimize: int = ... + self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1 ) -> None: ... - def writepy(self, pathname: str, basename: str = ..., filterfunc: Callable[[str], bool] | None = ...) -> None: ... + def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: ... class ZipInfo: filename: str @@ -207,18 +211,18 @@ class ZipInfo: compress_size: int file_size: int orig_filename: str # undocumented - def __init__(self, filename: str = ..., date_time: _DateTuple = ...) -> None: ... + def __init__(self, filename: str = "NoName", date_time: _DateTuple = ...) -> None: ... if sys.version_info >= (3, 8): @classmethod def from_file( - cls: type[Self], filename: StrPath, arcname: StrPath | None = ..., *, strict_timestamps: bool = ... + cls: type[Self], filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True ) -> Self: ... else: @classmethod - def from_file(cls: type[Self], filename: StrPath, arcname: StrPath | None = ...) -> Self: ... + def from_file(cls: type[Self], filename: StrPath, arcname: StrPath | None = None) -> Self: ... def is_dir(self) -> bool: ... - def FileHeader(self, zip64: bool | None = ...) -> bytes: ... + def FileHeader(self, zip64: bool | None = None) -> bytes: ... class _PathOpenProtocol(Protocol): def __call__(self, mode: _ReadWriteMode = ..., pwd: bytes | None = ..., *, force_zip64: bool = ...) -> IO[bytes]: ... @@ -240,9 +244,11 @@ if sys.version_info >= (3, 8): @property def stem(self) -> str: ... - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = ...) -> None: ... + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... if sys.version_info >= (3, 9): - def open(self, mode: _ReadWriteBinaryMode = ..., *args: Any, pwd: bytes | None = ..., **kwargs: Any) -> IO[bytes]: ... + def open( + self, mode: _ReadWriteBinaryMode = "r", *args: Any, pwd: bytes | None = None, **kwargs: Any + ) -> IO[bytes]: ... 
else: @property def open(self) -> _PathOpenProtocol: ... diff --git a/mypy/typeshed/stdlib/zipimport.pyi b/mypy/typeshed/stdlib/zipimport.pyi index dc2f1aee0752..ee97faace379 100644 --- a/mypy/typeshed/stdlib/zipimport.pyi +++ b/mypy/typeshed/stdlib/zipimport.pyi @@ -17,8 +17,8 @@ class zipimporter: else: def __init__(self, path: StrOrBytesPath) -> None: ... - def find_loader(self, fullname: str, path: str | None = ...) -> tuple[zipimporter | None, list[str]]: ... # undocumented - def find_module(self, fullname: str, path: str | None = ...) -> zipimporter | None: ... + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... # undocumented + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... def get_code(self, fullname: str) -> CodeType: ... def get_data(self, pathname: str) -> bytes: ... def get_filename(self, fullname: str) -> str: ... @@ -27,5 +27,5 @@ class zipimporter: def is_package(self, fullname: str) -> bool: ... def load_module(self, fullname: str) -> ModuleType: ... if sys.version_info >= (3, 10): - def find_spec(self, fullname: str, target: ModuleType | None = ...) -> ModuleSpec | None: ... + def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: ... def invalidate_caches(self) -> None: ... diff --git a/mypy/typeshed/stdlib/zlib.pyi b/mypy/typeshed/stdlib/zlib.pyi index ea41567eefc5..c3419af0de3f 100644 --- a/mypy/typeshed/stdlib/zlib.pyi +++ b/mypy/typeshed/stdlib/zlib.pyi @@ -40,22 +40,17 @@ class _Decompress: def flush(self, length: int = ...) -> bytes: ... def copy(self) -> _Decompress: ... -def adler32(__data: ReadableBuffer, __value: int = ...) -> int: ... +def adler32(__data: ReadableBuffer, __value: int = 1) -> int: ... if sys.version_info >= (3, 11): - def compress(__data: ReadableBuffer, level: int = ..., wbits: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = -1, wbits: int = 15) -> bytes: ... else: - def compress(__data: ReadableBuffer, level: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = -1) -> bytes: ... def compressobj( - level: int = ..., - method: int = ..., - wbits: int = ..., - memLevel: int = ..., - strategy: int = ..., - zdict: ReadableBuffer | None = ..., + level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None ) -> _Compress: ... -def crc32(__data: ReadableBuffer, __value: int = ...) -> int: ... -def decompress(__data: ReadableBuffer, wbits: int = ..., bufsize: int = ...) -> bytes: ... -def decompressobj(wbits: int = ..., zdict: ReadableBuffer = ...) -> _Decompress: ... +def crc32(__data: ReadableBuffer, __value: int = 0) -> int: ... +def decompress(__data: ReadableBuffer, wbits: int = 15, bufsize: int = 16384) -> bytes: ... +def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: ... diff --git a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi index 8b9ba9e7023a..0bdf853f4069 100644 --- a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi @@ -26,7 +26,7 @@ class ZoneInfo(tzinfo): # Note: Both here and in clear_cache, the types allow the use of `str` where # a sequence of strings is required. This should be remedied if a solution # to this typing bug is found: https://github.com/python/typing/issues/256 -def reset_tzpath(to: Sequence[StrPath] | None = ...) -> None: ... 
+def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: ... def available_timezones() -> set[str]: ... TZPATH: Sequence[str] From 820c46a4d75ec5f6dc95c09845a317ff59c4b4bf Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 26 Sep 2022 12:55:07 -0700 Subject: [PATCH 244/292] Remove use of LiteralString in builtins (#13743) --- mypy/typeshed/stdlib/builtins.pyi | 94 +------------------------------ 1 file changed, 1 insertion(+), 93 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 022b540d1e48..40f740d0838c 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -54,7 +54,7 @@ from typing import ( # noqa: Y022 overload, type_check_only, ) -from typing_extensions import Literal, LiteralString, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -413,17 +413,8 @@ class str(Sequence[str]): def __new__(cls: type[Self], object: object = ...) -> Self: ... @overload def __new__(cls: type[Self], object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... - @overload - def capitalize(self: LiteralString) -> LiteralString: ... - @overload def capitalize(self) -> str: ... # type: ignore[misc] - @overload - def casefold(self: LiteralString) -> LiteralString: ... - @overload def casefold(self) -> str: ... # type: ignore[misc] - @overload - def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @@ -431,20 +422,11 @@ class str(Sequence[str]): self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): - @overload - def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: - @overload - def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... - @overload def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -460,91 +442,32 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... - @overload - def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... - @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] - @overload - def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... 
- @overload def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def lower(self: LiteralString) -> LiteralString: ... - @overload def lower(self) -> str: ... # type: ignore[misc] - @overload - def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def replace( - self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 - ) -> LiteralString: ... - @overload def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): - @overload - def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... - @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] - @overload - def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... - @overload def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] - @overload - def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] - @overload - def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... - @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - @overload - def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def swapcase(self: LiteralString) -> LiteralString: ... - @overload def swapcase(self) -> str: ... # type: ignore[misc] - @overload - def title(self: LiteralString) -> LiteralString: ... - @overload def title(self) -> str: ... 
# type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... - @overload - def upper(self: LiteralString) -> LiteralString: ... - @overload def upper(self) -> str: ... # type: ignore[misc] - @overload - def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... - @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload @@ -555,9 +478,6 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... - @overload - def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... - @overload def __add__(self, __s: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __o: str) -> bool: ... # type: ignore[override] @@ -565,25 +485,13 @@ class str(Sequence[str]): def __ge__(self, __x: str) -> bool: ... def __getitem__(self, __i: SupportsIndex | slice) -> str: ... def __gt__(self, __x: str) -> bool: ... - @overload - def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... - @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __x: str) -> bool: ... - @overload - def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... - @overload def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] - @overload - def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... - @overload def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __x: object) -> bool: ... - @overload - def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... - @overload def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... From af7604de58c4c4952fd51a7556a6c56466113010 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 29 Oct 2022 12:47:21 -0700 Subject: [PATCH 245/292] Revert sum literal integer change (#13961) This is allegedly causing large performance problems, see 13821 typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing to undo. Patching this in typeshed also feels weird, since there's a more general soundness issue. If a typevar has a bound or constraint, we might not want to solve it to a Literal. If we can confirm the performance regression or fix the unsoundness within mypy, I might pursue upstreaming this in typeshed. (Reminder: add this to the sync_typeshed script once merged) --- mypy/typeshed/stdlib/builtins.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 40f740d0838c..9f45a937764b 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1634,11 +1634,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... 
# type: ignore[misc] + def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... From fe40f814387fc671ba0cc679453b01eabeb7c112 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 18 Nov 2022 01:08:36 -0800 Subject: [PATCH 246/292] Revert typeshed ctypes change (#14128) Since the plugin provides superior type checking: https://github.com/python/mypy/pull/13987#issuecomment-1310863427 --- mypy/typeshed/stdlib/ctypes/__init__.pyi | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index cd31a36a354d..5c4299989d92 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -271,11 +271,7 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - # Note: only available if _CT == c_char - @property - def raw(self) -> bytes: ... - @raw.setter - def raw(self, value: ReadableBuffer) -> None: ... + raw: bytes # Note: only available if _CT == c_char value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT From f5276562843755cf50f3adea8d72a9cef665c0c1 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 3 Feb 2023 13:51:33 -0800 Subject: [PATCH 247/292] Update commit hashes in sync-typeshed (#14599) Also use pyproject.toml instead of README.md because there are fewer pyproject.toml in mypy Co-authored-by: Alex Waygood --- misc/sync-typeshed.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 8eeb9be7f4f8..98f94bbccd8b 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -24,7 +24,7 @@ def check_state() -> None: - if not os.path.isfile("README.md") and not os.path.isdir("mypy"): + if not os.path.isfile("pyproject.toml") or not os.path.isdir("mypy"): sys.exit("error: The current working directory must be the mypy repository root") out = subprocess.check_output(["git", "status", "-s", os.path.join("mypy", "typeshed")]) if out: @@ -185,9 +185,9 @@ def main() -> None: print("Created typeshed sync commit.") commits_to_cherry_pick = [ - "780534b13722b7b0422178c049a1cbbf4ea4255b", # LiteralString reverts - "5319fa34a8004c1568bb6f032a07b8b14cc95bed", # sum reverts - "0062994228fb62975c6cef4d2c80d00c7aa1c545", # ctypes reverts + "820c46a4d75ec5f6dc95c09845a317ff59c4b4bf", # LiteralString reverts + "af7604de58c4c4952fd51a7556a6c56466113010", # sum reverts + "fe40f814387fc671ba0cc679453b01eabeb7c112", # ctypes reverts ] for commit in commits_to_cherry_pick: subprocess.run(["git", "cherry-pick", commit], check=True) From b64bd3d1ddffb99b38b9fc28e37c908313d4778e Mon Sep 17 00:00:00 2001 From: Richard Si Date: Sun, 5 Feb 2023 01:28:09 -0500 Subject: [PATCH 248/292] [mypyc] Support __(r)divmod__ dunders (#14613) Pretty simple. Towards https://github.com/mypyc/mypyc/issues/553. 
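As a rough illustration of what this enables (the class and function names below are made up for this note, not part of the patch; the pattern mirrors the new run-dunders test), a compiled class can now define __divmod__ and have builtins.divmod() dispatch to it from both compiled and interpreted callers:

    from typing import Tuple

    class Money:
        def __init__(self, cents: int) -> None:
            self.cents = cents

        def __divmod__(self, unit: int) -> Tuple[int, int]:
            # divmod(Money(250), 100) == (2, 50)
            return divmod(self.cents, unit)

    def split(m: Money, unit: int) -> Tuple[int, int]:
        # Behaves the same whether this function is compiled or interpreted;
        # interpreted callers reach __divmod__ through the new nb_divmod slot.
        return divmod(m, unit)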
--- mypyc/codegen/emitclass.py | 2 ++ mypyc/primitives/generic_ops.py | 11 +++++++++++ mypyc/test-data/fixtures/ir.py | 11 +++++++++++ mypyc/test-data/irbuild-any.test | 15 +++++++++++++++ mypyc/test-data/run-dunders.test | 5 +++++ 5 files changed, 44 insertions(+) diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 72e16345a325..79fdd9103371 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -82,6 +82,8 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: "__rtruediv__": ("nb_true_divide", generate_bin_op_wrapper), "__floordiv__": ("nb_floor_divide", generate_bin_op_wrapper), "__rfloordiv__": ("nb_floor_divide", generate_bin_op_wrapper), + "__divmod__": ("nb_divmod", generate_bin_op_wrapper), + "__rdivmod__": ("nb_divmod", generate_bin_op_wrapper), "__lshift__": ("nb_lshift", generate_bin_op_wrapper), "__rlshift__": ("nb_lshift", generate_bin_op_wrapper), "__rshift__": ("nb_rshift", generate_bin_op_wrapper), diff --git a/mypyc/primitives/generic_ops.py b/mypyc/primitives/generic_ops.py index f6817ad024b7..4f04608d11f3 100644 --- a/mypyc/primitives/generic_ops.py +++ b/mypyc/primitives/generic_ops.py @@ -75,6 +75,17 @@ priority=0, ) + +function_op( + name="builtins.divmod", + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyNumber_Divmod", + error_kind=ERR_MAGIC, + priority=0, +) + + for op, funcname in [ ("+=", "PyNumber_InPlaceAdd"), ("-=", "PyNumber_InPlaceSubtract"), diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index 2f3c18e9c731..37aab1d826d7 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -8,6 +8,7 @@ T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) +T_contra = TypeVar('T_contra', contravariant=True) S = TypeVar('S') K = TypeVar('K') # for keys in mapping V = TypeVar('V') # for values in mapping @@ -15,6 +16,11 @@ class __SupportsAbs(Protocol[T_co]): def __abs__(self) -> T_co: pass +class __SupportsDivMod(Protocol[T_contra, T_co]): + def __divmod__(self, other: T_contra) -> T_co: ... + +class __SupportsRDivMod(Protocol[T_contra, T_co]): + def __rdivmod__(self, other: T_contra) -> T_co: ... class object: def __init__(self) -> None: pass @@ -42,6 +48,7 @@ def __pow__(self, n: int, modulo: Optional[int] = None) -> int: pass def __floordiv__(self, x: int) -> int: pass def __truediv__(self, x: float) -> float: pass def __mod__(self, x: int) -> int: pass + def __divmod__(self, x: float) -> Tuple[float, float]: pass def __neg__(self) -> int: pass def __pos__(self) -> int: pass def __abs__(self) -> int: pass @@ -307,6 +314,10 @@ def zip(x: Iterable[T], y: Iterable[S]) -> Iterator[Tuple[T, S]]: ... def zip(x: Iterable[T], y: Iterable[S], z: Iterable[V]) -> Iterator[Tuple[T, S, V]]: ... def eval(e: str) -> Any: ... def abs(x: __SupportsAbs[T]) -> T: ... +@overload +def divmod(x: __SupportsDivMod[T_contra, T_co], y: T_contra) -> T_co: ... +@overload +def divmod(x: T_contra, y: __SupportsRDivMod[T_contra, T_co]) -> T_co: ... def exit() -> None: ... def min(x: T, y: T) -> T: ... def max(x: T, y: T) -> T: ... 
diff --git a/mypyc/test-data/irbuild-any.test b/mypyc/test-data/irbuild-any.test index bcf9a1880635..8cc626100262 100644 --- a/mypyc/test-data/irbuild-any.test +++ b/mypyc/test-data/irbuild-any.test @@ -198,3 +198,18 @@ L0: b = r4 return 1 +[case testFunctionBasedOps] +def f() -> None: + a = divmod(5, 2) +[out] +def f(): + r0, r1, r2 :: object + r3, a :: tuple[float, float] +L0: + r0 = object 5 + r1 = object 2 + r2 = PyNumber_Divmod(r0, r1) + r3 = unbox(tuple[float, float], r2) + a = r3 + return 1 + diff --git a/mypyc/test-data/run-dunders.test b/mypyc/test-data/run-dunders.test index 0b156e5c3af8..23323c7244de 100644 --- a/mypyc/test-data/run-dunders.test +++ b/mypyc/test-data/run-dunders.test @@ -402,6 +402,9 @@ class C: def __floordiv__(self, y: int) -> int: return self.x + y + 30 + def __divmod__(self, y: int) -> int: + return self.x + y + 40 + def test_generic() -> None: a: Any = C() assert a + 3 == 8 @@ -417,11 +420,13 @@ def test_generic() -> None: assert a @ 3 == 18 assert a / 2 == 27 assert a // 2 == 37 + assert divmod(a, 2) == 47 def test_native() -> None: c = C() assert c + 3 == 8 assert c - 3 == 2 + assert divmod(c, 3) == 48 def test_error() -> None: a: Any = C() From 332bb2d4c7a4c2093f1349b18c05431c96bdb4d9 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sun, 5 Feb 2023 11:07:26 +0000 Subject: [PATCH 249/292] [mypyc] Detect if attribute definition conflicts with base class/trait (#14535) Require that all the attribute definitions have the same type. Overriding with a different type is unsafe, and we don't want to add runtime checks when accessing an attribute that might be overridden with a different type. If the types would have different runtime representations, supporting that at all would be complicated. Fixes mypyc/mypyc#970. --- mypyc/irbuild/prepare.py | 19 +++++++++++++ mypyc/test-data/irbuild-classes.test | 42 ++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index 3c519c3d1c33..48a37de518b7 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -60,6 +60,7 @@ is_trait, ) from mypyc.options import CompilerOptions +from mypyc.sametype import is_same_type def build_type_map( @@ -112,6 +113,24 @@ def build_type_map( prepare_func_def(module.fullname, None, func, mapper) # TODO: what else? + # Check for incompatible attribute definitions that were not + # flagged by mypy but can't be supported when compiling. + for module, cdef in classes: + class_ir = mapper.type_to_ir[cdef.info] + for attr in class_ir.attributes: + for base_ir in class_ir.mro[1:]: + if attr in base_ir.attributes: + if not is_same_type(class_ir.attributes[attr], base_ir.attributes[attr]): + node = cdef.info.names[attr].node + assert node is not None + kind = "trait" if base_ir.is_trait else "class" + errors.error( + f'Type of "{attr}" is incompatible with ' + f'definition in {kind} "{base_ir.name}"', + module.path, + node.line, + ) + def is_from_module(node: SymbolNode, module: MypyFile) -> bool: return node.fullname == module.fullname + "." 
+ node.name diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index 700a529f9627..b9501c32180d 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -1245,3 +1245,45 @@ L2: y = 4 L3: return 1 + +[case testIncompatibleDefinitionOfAttributeInSubclass] +from mypy_extensions import trait + +class Base: + x: int + +class Bad1(Base): + x: bool # E: Type of "x" is incompatible with definition in class "Base" + +class Good1(Base): + x: int + +class Good2(Base): + x: int = 0 + +class Good3(Base): + x = 0 + +class Good4(Base): + def __init__(self) -> None: + self.x = 0 + +class Good5(Base): + def __init__(self) -> None: + self.x: int = 0 + +class Base2(Base): + pass + +class Bad2(Base2): + x: bool = False # E: Type of "x" is incompatible with definition in class "Base" + +class Bad3(Base): + x = False # E: Type of "x" is incompatible with definition in class "Base" + +@trait +class T: + y: object + +class E(T): + y: str # E: Type of "y" is incompatible with definition in trait "T" From 27f51fc667e9ceaba12496276fce1db577221bcb Mon Sep 17 00:00:00 2001 From: Richard Si Date: Sun, 5 Feb 2023 06:09:56 -0500 Subject: [PATCH 250/292] [mypyc] Raise "non-trait base must be first..." error less frequently (#14468) It would raise even if there were only non-trait bases, leading to this slightly confusing situation: class A: pass class B: pass class C(A, B): pass # E: Non-trait bases must appear first in parent list # E: Multiple inheritance is not supported (except for traits) Now the bases must include a non-trait *and* the first base must be a trait to error. This leads to some false-negatives when there's more than one non-trait base, but in that case, it's better to only tell the user that multiple inheritance is not supported. See also: https://github.com/mypyc/mypyc/issues/826#issuecomment-1383215915 --- mypyc/irbuild/prepare.py | 8 ++++++-- mypyc/test-data/commandline.test | 18 ++++++++++++++++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index 48a37de518b7..b3d10887ce21 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -275,8 +275,12 @@ def prepare_class_def( # Set up the parent class bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] - if not all(c.is_trait for c in bases[1:]): - errors.error("Non-trait bases must appear first in parent list", path, cdef.line) + if len(bases) > 1 and any(not c.is_trait for c in bases) and bases[0].is_trait: + # If the first base is a non-trait, don't ever error here. While it is correct + # to error if a trait comes before the next non-trait base (e.g. non-trait, trait, + # non-trait), it's pointless, confusing noise from the bigger issue: multiple + # inheritance is *not* supported. 
+ errors.error("Non-trait base must appear first in parent list", path, cdef.line) ir.traits = [c for c in bases if c.is_trait] mro = [] # All mypyc base classes diff --git a/mypyc/test-data/commandline.test b/mypyc/test-data/commandline.test index e7ba11192d28..672e879fbe1e 100644 --- a/mypyc/test-data/commandline.test +++ b/mypyc/test-data/commandline.test @@ -150,7 +150,7 @@ class PureTrait: pass @trait -class Trait1(Concrete1): +class Trait1: pass class Concrete2: @@ -164,9 +164,23 @@ class Trait2(Concrete2): class NonExt(Concrete1): # E: Non-extension classes may not inherit from extension classes pass -class Nope(Trait1, Concrete2): # E: Non-trait bases must appear first in parent list # E: Multiple inheritance is not supported (except for traits) + +class NopeMultipleInheritance(Concrete1, Concrete2): # E: Multiple inheritance is not supported (except for traits) + pass + +class NopeMultipleInheritanceAndBadOrder(Concrete1, Trait1, Concrete2): # E: Multiple inheritance is not supported (except for traits) + pass + +class NopeMultipleInheritanceAndBadOrder2(Concrete1, Concrete2, Trait1): # E: Multiple inheritance is not supported (except for traits) pass +class NopeMultipleInheritanceAndBadOrder3(Trait1, Concrete1, Concrete2): # E: Non-trait base must appear first in parent list # E: Multiple inheritance is not supported (except for traits) + pass + +class NopeBadOrder(Trait1, Concrete2): # E: Non-trait base must appear first in parent list + pass + + @decorator class NonExt2: @property # E: Property setters not supported in non-extension classes From 5614ffa0ef87e604e07e3f83079d0499c7d22886 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sun, 5 Feb 2023 11:57:24 +0000 Subject: [PATCH 251/292] [mypyc] Generate faster code for bool comparisons and arithmetic (#14489) Generate specialized, efficient IR for various operations on bools. These are covered: * Bool comparisons * Mixed bool/integer comparisons * Bool arithmetic (binary and unary) * Mixed bool/integer arithmetic and bitwise ops Mixed operations where the left operand is a `bool` and the right operand is a native int still have some unnecessary conversions between native int and `int`. This would be a bit trickier to fix and is seems rare, so it doesn't seem urgent to fix this. Fixes mypyc/mypyc#968. 
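For reference, these are roughly the operation shapes that now get the specialized IR described above (the function names are illustrative only; the cases mirror the new irbuild-bool and run-bools tests):

    def both(x: bool, y: bool) -> bool:
        return x & y        # bool bitwise op

    def count(b: bool, n: int) -> int:
        return b + n        # mixed bool/int arithmetic

    def ordered(x: bool, y: bool) -> bool:
        return x <= y       # bool comparison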
--- mypyc/analysis/ircheck.py | 6 +- mypyc/irbuild/ll_builder.py | 82 +++++--- mypyc/test-data/irbuild-bool.test | 319 ++++++++++++++++++++++++++++++ mypyc/test-data/irbuild-i64.test | 39 ++++ mypyc/test-data/irbuild-int.test | 11 ++ mypyc/test-data/run-bools.test | 102 ++++++++++ 6 files changed, 533 insertions(+), 26 deletions(-) diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py index e96c640fa8a1..719faebfcee8 100644 --- a/mypyc/analysis/ircheck.py +++ b/mypyc/analysis/ircheck.py @@ -217,6 +217,10 @@ def check_type_coercion(self, op: Op, src: RType, dest: RType) -> None: source=op, desc=f"Cannot coerce source type {src.name} to dest type {dest.name}" ) + def check_compatibility(self, op: Op, t: RType, s: RType) -> None: + if not can_coerce_to(t, s) or not can_coerce_to(s, t): + self.fail(source=op, desc=f"{t.name} and {s.name} are not compatible") + def visit_goto(self, op: Goto) -> None: self.check_control_op_targets(op) @@ -375,7 +379,7 @@ def visit_int_op(self, op: IntOp) -> None: pass def visit_comparison_op(self, op: ComparisonOp) -> None: - pass + self.check_compatibility(op, op.lhs.type, op.rhs.type) def visit_load_mem(self, op: LoadMem) -> None: pass diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 019f709f0acc..691f4729e4a4 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -199,6 +199,9 @@ ">>=", } +# Binary operations on bools that are specialized and don't just promote operands to int +BOOL_BINARY_OPS: Final = {"&", "&=", "|", "|=", "^", "^=", "==", "!=", "<", "<=", ">", ">="} + class LowLevelIRBuilder: def __init__(self, current_module: str, mapper: Mapper, options: CompilerOptions) -> None: @@ -326,13 +329,13 @@ def coerce( ): # Equivalent types return src - elif ( - is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) - ) and is_int_rprimitive(target_type): + elif (is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type)) and is_tagged( + target_type + ): shifted = self.int_op( bool_rprimitive, src, Integer(1, bool_rprimitive), IntOp.LEFT_SHIFT ) - return self.add(Extend(shifted, int_rprimitive, signed=False)) + return self.add(Extend(shifted, target_type, signed=False)) elif ( is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) ) and is_fixed_width_rtype(target_type): @@ -1245,48 +1248,45 @@ def binary_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: return self.compare_bytes(lreg, rreg, op, line) if is_tagged(ltype) and is_tagged(rtype) and op in int_comparison_op_mapping: return self.compare_tagged(lreg, rreg, op, line) - if ( - is_bool_rprimitive(ltype) - and is_bool_rprimitive(rtype) - and op in ("&", "&=", "|", "|=", "^", "^=") - ): - return self.bool_bitwise_op(lreg, rreg, op[0], line) + if is_bool_rprimitive(ltype) and is_bool_rprimitive(rtype) and op in BOOL_BINARY_OPS: + if op in ComparisonOp.signed_ops: + return self.bool_comparison_op(lreg, rreg, op, line) + else: + return self.bool_bitwise_op(lreg, rreg, op[0], line) if isinstance(rtype, RInstance) and op in ("in", "not in"): return self.translate_instance_contains(rreg, lreg, op, line) if is_fixed_width_rtype(ltype): if op in FIXED_WIDTH_INT_BINARY_OPS: if op.endswith("="): op = op[:-1] + if op != "//": + op_id = int_op_to_id[op] + else: + op_id = IntOp.DIV + if is_bool_rprimitive(rtype) or is_bit_rprimitive(rtype): + rreg = self.coerce(rreg, ltype, line) + rtype = ltype if is_fixed_width_rtype(rtype) or is_tagged(rtype): - if op != "//": - op_id = int_op_to_id[op] - else: - op_id = IntOp.DIV return 
self.fixed_width_int_op(ltype, lreg, rreg, op_id, line) if isinstance(rreg, Integer): # TODO: Check what kind of Integer - if op != "//": - op_id = int_op_to_id[op] - else: - op_id = IntOp.DIV return self.fixed_width_int_op( ltype, lreg, Integer(rreg.value >> 1, ltype), op_id, line ) elif op in ComparisonOp.signed_ops: if is_int_rprimitive(rtype): rreg = self.coerce_int_to_fixed_width(rreg, ltype, line) + elif is_bool_rprimitive(rtype) or is_bit_rprimitive(rtype): + rreg = self.coerce(rreg, ltype, line) op_id = ComparisonOp.signed_ops[op] if is_fixed_width_rtype(rreg.type): return self.comparison_op(lreg, rreg, op_id, line) if isinstance(rreg, Integer): return self.comparison_op(lreg, Integer(rreg.value >> 1, ltype), op_id, line) elif is_fixed_width_rtype(rtype): - if ( - isinstance(lreg, Integer) or is_tagged(ltype) - ) and op in FIXED_WIDTH_INT_BINARY_OPS: + if op in FIXED_WIDTH_INT_BINARY_OPS: if op.endswith("="): op = op[:-1] - # TODO: Support comparison ops (similar to above) if op != "//": op_id = int_op_to_id[op] else: @@ -1296,15 +1296,38 @@ def binary_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: return self.fixed_width_int_op( rtype, Integer(lreg.value >> 1, rtype), rreg, op_id, line ) - else: + if is_tagged(ltype): + return self.fixed_width_int_op(rtype, lreg, rreg, op_id, line) + if is_bool_rprimitive(ltype) or is_bit_rprimitive(ltype): + lreg = self.coerce(lreg, rtype, line) return self.fixed_width_int_op(rtype, lreg, rreg, op_id, line) elif op in ComparisonOp.signed_ops: if is_int_rprimitive(ltype): lreg = self.coerce_int_to_fixed_width(lreg, rtype, line) + elif is_bool_rprimitive(ltype) or is_bit_rprimitive(ltype): + lreg = self.coerce(lreg, rtype, line) op_id = ComparisonOp.signed_ops[op] if isinstance(lreg, Integer): return self.comparison_op(Integer(lreg.value >> 1, rtype), rreg, op_id, line) + if is_fixed_width_rtype(lreg.type): + return self.comparison_op(lreg, rreg, op_id, line) + + # Mixed int comparisons + if op in ("==", "!="): + op_id = ComparisonOp.signed_ops[op] + if is_tagged(ltype) and is_subtype(rtype, ltype): + rreg = self.coerce(rreg, int_rprimitive, line) + return self.comparison_op(lreg, rreg, op_id, line) + if is_tagged(rtype) and is_subtype(ltype, rtype): + lreg = self.coerce(lreg, int_rprimitive, line) return self.comparison_op(lreg, rreg, op_id, line) + elif op in op in int_comparison_op_mapping: + if is_tagged(ltype) and is_subtype(rtype, ltype): + rreg = self.coerce(rreg, short_int_rprimitive, line) + return self.compare_tagged(lreg, rreg, op, line) + if is_tagged(rtype) and is_subtype(ltype, rtype): + lreg = self.coerce(lreg, short_int_rprimitive, line) + return self.compare_tagged(lreg, rreg, op, line) call_c_ops_candidates = binary_ops.get(op, []) target = self.matching_call_c(call_c_ops_candidates, [lreg, rreg], line) @@ -1509,14 +1532,21 @@ def bool_bitwise_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value assert False, op return self.add(IntOp(bool_rprimitive, lreg, rreg, code, line)) + def bool_comparison_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: + op_id = ComparisonOp.signed_ops[op] + return self.comparison_op(lreg, rreg, op_id, line) + def unary_not(self, value: Value, line: int) -> Value: mask = Integer(1, value.type, line) return self.int_op(value.type, value, mask, IntOp.XOR, line) def unary_op(self, value: Value, expr_op: str, line: int) -> Value: typ = value.type - if (is_bool_rprimitive(typ) or is_bit_rprimitive(typ)) and expr_op == "not": - return self.unary_not(value, line) + if 
is_bool_rprimitive(typ) or is_bit_rprimitive(typ): + if expr_op == "not": + return self.unary_not(value, line) + if expr_op == "+": + return value if is_fixed_width_rtype(typ): if expr_op == "-": # Translate to '0 - x' @@ -1532,6 +1562,8 @@ def unary_op(self, value: Value, expr_op: str, line: int) -> Value: if is_short_int_rprimitive(typ): num >>= 1 return Integer(-num, typ, value.line) + if is_tagged(typ) and expr_op == "+": + return value if isinstance(typ, RInstance): if expr_op == "-": method = "__neg__" diff --git a/mypyc/test-data/irbuild-bool.test b/mypyc/test-data/irbuild-bool.test index 407ab8bcda93..9257d8d63f7e 100644 --- a/mypyc/test-data/irbuild-bool.test +++ b/mypyc/test-data/irbuild-bool.test @@ -142,3 +142,322 @@ L2: r4 = 0 L3: return r4 + +[case testBoolComparisons] +def eq(x: bool, y: bool) -> bool: + return x == y + +def neq(x: bool, y: bool) -> bool: + return x != y + +def lt(x: bool, y: bool) -> bool: + return x < y + +def le(x: bool, y: bool) -> bool: + return x <= y + +def gt(x: bool, y: bool) -> bool: + return x > y + +def ge(x: bool, y: bool) -> bool: + return x >= y +[out] +def eq(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x == y + return r0 +def neq(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x != y + return r0 +def lt(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x < y :: signed + return r0 +def le(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x <= y :: signed + return r0 +def gt(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x > y :: signed + return r0 +def ge(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x >= y :: signed + return r0 + +[case testBoolMixedComparisons1] +from mypy_extensions import i64 + +def eq1(x: int, y: bool) -> bool: + return x == y + +def eq2(x: bool, y: int) -> bool: + return x == y + +def neq1(x: i64, y: bool) -> bool: + return x != y + +def neq2(x: bool, y: i64) -> bool: + return x != y +[out] +def eq1(x, y): + x :: int + y, r0 :: bool + r1 :: int + r2 :: bit +L0: + r0 = y << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = x == r1 + return r2 +def eq2(x, y): + x :: bool + y :: int + r0 :: bool + r1 :: int + r2 :: bit +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = r1 == y + return r2 +def neq1(x, y): + x :: int64 + y :: bool + r0 :: int64 + r1 :: bit +L0: + r0 = extend y: builtins.bool to int64 + r1 = x != r0 + return r1 +def neq2(x, y): + x :: bool + y, r0 :: int64 + r1 :: bit +L0: + r0 = extend x: builtins.bool to int64 + r1 = r0 != y + return r1 + +[case testBoolMixedComparisons2] +from mypy_extensions import i64 + +def lt1(x: bool, y: int) -> bool: + return x < y + +def lt2(x: int, y: bool) -> bool: + return x < y + +def gt1(x: bool, y: i64) -> bool: + return x < y + +def gt2(x: i64, y: bool) -> bool: + return x < y +[out] +def lt1(x, y): + x :: bool + y :: int + r0 :: bool + r1 :: short_int + r2 :: native_int + r3 :: bit + r4 :: native_int + r5, r6, r7 :: bit + r8 :: bool + r9 :: bit +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to short_int + r2 = r1 & 1 + r3 = r2 == 0 + r4 = y & 1 + r5 = r4 == 0 + r6 = r3 & r5 + if r6 goto L1 else goto L2 :: bool +L1: + r7 = r1 < y :: signed + r8 = r7 + goto L3 +L2: + r9 = CPyTagged_IsLt_(r1, y) + r8 = r9 +L3: + return r8 +def lt2(x, y): + x :: int + y, r0 :: bool + r1 :: short_int + r2 :: native_int + r3 :: bit + r4 :: native_int + r5, r6, r7 :: bit + r8 :: bool + r9 :: bit +L0: + r0 = y << 1 + r1 = extend r0: builtins.bool to short_int + r2 = x & 1 + r3 = r2 == 0 + r4 = r1 & 1 + r5 = r4 == 0 + r6 = r3 & r5 + if r6 goto L1 else goto L2 :: bool +L1: + 
r7 = x < r1 :: signed + r8 = r7 + goto L3 +L2: + r9 = CPyTagged_IsLt_(x, r1) + r8 = r9 +L3: + return r8 +def gt1(x, y): + x :: bool + y, r0 :: int64 + r1 :: bit +L0: + r0 = extend x: builtins.bool to int64 + r1 = r0 < y :: signed + return r1 +def gt2(x, y): + x :: int64 + y :: bool + r0 :: int64 + r1 :: bit +L0: + r0 = extend y: builtins.bool to int64 + r1 = x < r0 :: signed + return r1 + +[case testBoolBitwise] +from mypy_extensions import i64 +def bitand(x: bool, y: bool) -> bool: + b = x & y + return b +def bitor(x: bool, y: bool) -> bool: + b = x | y + return b +def bitxor(x: bool, y: bool) -> bool: + b = x ^ y + return b +def invert(x: bool) -> int: + return ~x +def mixed_bitand(x: i64, y: bool) -> i64: + return x & y +[out] +def bitand(x, y): + x, y, r0, b :: bool +L0: + r0 = x & y + b = r0 + return b +def bitor(x, y): + x, y, r0, b :: bool +L0: + r0 = x | y + b = r0 + return b +def bitxor(x, y): + x, y, r0, b :: bool +L0: + r0 = x ^ y + b = r0 + return b +def invert(x): + x, r0 :: bool + r1, r2 :: int +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = CPyTagged_Invert(r1) + return r2 +def mixed_bitand(x, y): + x :: int64 + y :: bool + r0, r1 :: int64 +L0: + r0 = extend y: builtins.bool to int64 + r1 = x & r0 + return r1 + +[case testBoolArithmetic] +def add(x: bool, y: bool) -> int: + z = x + y + return z +def mixed(b: bool, n: int) -> int: + z = b + n + z -= b + z = z * b + return z +def negate(b: bool) -> int: + return -b +def unary_plus(b: bool) -> int: + x = +b + return x +[out] +def add(x, y): + x, y, r0 :: bool + r1 :: int + r2 :: bool + r3, r4, z :: int +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = y << 1 + r3 = extend r2: builtins.bool to builtins.int + r4 = CPyTagged_Add(r1, r3) + z = r4 + return z +def mixed(b, n): + b :: bool + n :: int + r0 :: bool + r1, r2, z :: int + r3 :: bool + r4, r5 :: int + r6 :: bool + r7, r8 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = CPyTagged_Add(r1, n) + z = r2 + r3 = b << 1 + r4 = extend r3: builtins.bool to builtins.int + r5 = CPyTagged_Subtract(z, r4) + z = r5 + r6 = b << 1 + r7 = extend r6: builtins.bool to builtins.int + r8 = CPyTagged_Multiply(z, r7) + z = r8 + return z +def negate(b): + b, r0 :: bool + r1, r2 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = CPyTagged_Negate(r1) + return r2 +def unary_plus(b): + b, r0 :: bool + r1, x :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + x = r1 + return x diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index 6b8dd357421f..253d1a837c7b 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1731,6 +1731,45 @@ def f5(): L0: return 4 +[case testI64OperationsWithBools] +from mypy_extensions import i64 + +# TODO: Other mixed operations + +def add_bool_to_int(n: i64, b: bool) -> i64: + return n + b + +def compare_bool_to_i64(n: i64, b: bool) -> bool: + if n == b: + return b != n + return True +[out] +def add_bool_to_int(n, b): + n :: int64 + b :: bool + r0, r1 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + r1 = n + r0 + return r1 +def compare_bool_to_i64(n, b): + n :: int64 + b :: bool + r0 :: int64 + r1 :: bit + r2 :: int64 + r3 :: bit +L0: + r0 = extend b: builtins.bool to int64 + r1 = n == r0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = extend b: builtins.bool to int64 + r3 = r2 != n + return r3 +L2: + return 1 + [case testI64Cast] from typing import cast from 
mypy_extensions import i64 diff --git a/mypyc/test-data/irbuild-int.test b/mypyc/test-data/irbuild-int.test index aebadce5650e..fbe00aff4040 100644 --- a/mypyc/test-data/irbuild-int.test +++ b/mypyc/test-data/irbuild-int.test @@ -222,3 +222,14 @@ def int_to_int(n): n :: int L0: return n + +[case testIntUnaryPlus] +def unary_plus(n: int) -> int: + x = +n + return x +[out] +def unary_plus(n): + n, x :: int +L0: + x = n + return x diff --git a/mypyc/test-data/run-bools.test b/mypyc/test-data/run-bools.test index e23b35d82fc5..522296592c54 100644 --- a/mypyc/test-data/run-bools.test +++ b/mypyc/test-data/run-bools.test @@ -16,6 +16,9 @@ False [case testBoolOps] from typing import Optional, Any +MYPY = False +if MYPY: + from mypy_extensions import i64 def f(x: bool) -> bool: if x: @@ -119,3 +122,102 @@ def test_any_to_bool() -> None: b: Any = a + 1 assert not bool(a) assert bool(b) + +def eq(x: bool, y: bool) -> bool: + return x == y + +def ne(x: bool, y: bool) -> bool: + return x != y + +def lt(x: bool, y: bool) -> bool: + return x < y + +def le(x: bool, y: bool) -> bool: + return x <= y + +def gt(x: bool, y: bool) -> bool: + return x > y + +def ge(x: bool, y: bool) -> bool: + return x >= y + +def test_comparisons() -> None: + for x in True, False: + for y in True, False: + x2: Any = x + y2: Any = y + assert eq(x, y) == (x2 == y2) + assert ne(x, y) == (x2 != y2) + assert lt(x, y) == (x2 < y2) + assert le(x, y) == (x2 <= y2) + assert gt(x, y) == (x2 > y2) + assert ge(x, y) == (x2 >= y2) + +def eq_mixed(x: bool, y: int) -> bool: + return x == y + +def neq_mixed(x: int, y: bool) -> bool: + return x != y + +def lt_mixed(x: bool, y: int) -> bool: + return x < y + +def gt_mixed(x: int, y: bool) -> bool: + return x > y + +def test_mixed_comparisons() -> None: + for x in True, False: + for n in -(1 << 70), -123, 0, 1, 1753, 1 << 70: + assert eq_mixed(x, n) == (int(x) == n) + assert neq_mixed(n, x) == (n != int(x)) + assert lt_mixed(x, n) == (int(x) < n) + assert gt_mixed(n, x) == (n > int(x)) + +def add(x: bool, y: bool) -> int: + return x + y + +def add_mixed(b: bool, n: int) -> int: + return b + n + +def sub_mixed(n: int, b: bool) -> int: + return n - b + +def test_arithmetic() -> None: + for x in True, False: + for y in True, False: + assert add(x, y) == int(x) + int(y) + for n in -(1 << 70), -123, 0, 1, 1753, 1 << 70: + assert add_mixed(x, n) == int(x) + n + assert sub_mixed(n, x) == n - int(x) + +def add_mixed_i64(b: bool, n: i64) -> i64: + return b + n + +def sub_mixed_i64(n: i64, b: bool) -> i64: + return n - b + +def test_arithmetic_i64() -> None: + for x in True, False: + for n in -(1 << 62), -123, 0, 1, 1753, 1 << 62: + assert add_mixed_i64(x, n) == int(x) + n + assert sub_mixed_i64(n, x) == n - int(x) + +def eq_mixed_i64(x: bool, y: i64) -> bool: + return x == y + +def neq_mixed_i64(x: i64, y: bool) -> bool: + return x != y + +def lt_mixed_i64(x: bool, y: i64) -> bool: + return x < y + +def gt_mixed_i64(x: i64, y: bool) -> bool: + return x > y + +def test_mixed_comparisons_i64() -> None: + for x in True, False: + for n in -(1 << 62), -123, 0, 1, 1753, 1 << 62: + assert eq_mixed_i64(x, n) == (int(x) == n) + assert neq_mixed_i64(n, x) == (n != int(x)) + assert lt_mixed_i64(x, n) == (int(x) < n) + assert gt_mixed_i64(n, x) == (n > int(x)) From 07f672148d2c3365b8fe3e879bee7c0d9936fb11 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 5 Feb 2023 19:34:32 +0000 Subject: [PATCH 252/292] Use a dedicated error code for assignment to method (#14570) Fixes #2427 I also add some special logic, 
so that `# type: ignore[assignment]` will cover `[method-assign]`. --- docs/source/error_code_list.rst | 29 ++++++++++++++++++++++++ docs/source/error_codes.rst | 11 +++++++++ mypy/errorcodes.py | 19 +++++++++++++++- mypy/errors.py | 12 +++++++++- mypy/messages.py | 2 +- test-data/unit/check-errorcodes.test | 34 +++++++++++++++++++++++++++- 6 files changed, 103 insertions(+), 4 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 11d01c884b33..dd049ee8bbdf 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -339,6 +339,35 @@ Example: # variable has type "str") [assignment] r.name = 5 +Check that assignment target is not a method [method-assign] +------------------------------------------------------------ + +In general, assigning to a method on class object or instance (a.k.a. +monkey-patching) is ambiguous in terms of types, since Python's static type +system cannot express difference between bound and unbound callable types. +Consider this example: + +.. code-block:: python + + class A: + def f(self) -> None: pass + def g(self) -> None: pass + + def h(self: A) -> None: pass + + A.f = h # type of h is Callable[[A], None] + A().f() # this works + A.f = A().g # type of A().g is Callable[[], None] + A().f() # but this also works at runtime + +To prevent the ambiguity, mypy will flag both assignments by default. If this +error code is disabled, mypy will treat all method assignments r.h.s. as unbound, +so the second assignment will still generate an error. + +.. note:: + + This error code is a sub-error code of a wider ``[assignment]`` code. + Check type variable values [type-var] ------------------------------------- diff --git a/docs/source/error_codes.rst b/docs/source/error_codes.rst index aabedf87f73a..34bb8ab6b5e1 100644 --- a/docs/source/error_codes.rst +++ b/docs/source/error_codes.rst @@ -113,3 +113,14 @@ still keep the other two error codes enabled. The overall logic is following: So one can e.g. enable some code globally, disable it for all tests in the corresponding config section, and then re-enable it with an inline comment in some specific test. + +Sub-error codes of other error codes +------------------------------------ + +In rare cases (mostly for backwards compatibility reasons), some error +code may be covered by another, wider error code. For example, an error with +code ``[method-assign]`` can be ignored by ``# type: ignore[assignment]``. +Similar logic works for disabling error codes globally. If a given error code +is a sub code of another one, it must mentioned in the docs for the narrower +code. This hierarchy is not nested, there cannot be sub-error codes of other +sub-error codes. 
diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index ab49e70eaf20..8881a767d72e 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -5,19 +5,30 @@ from __future__ import annotations +from collections import defaultdict from typing_extensions import Final error_codes: dict[str, ErrorCode] = {} +sub_code_map: dict[str, set[str]] = defaultdict(set) class ErrorCode: def __init__( - self, code: str, description: str, category: str, default_enabled: bool = True + self, + code: str, + description: str, + category: str, + default_enabled: bool = True, + sub_code_of: ErrorCode | None = None, ) -> None: self.code = code self.description = description self.category = category self.default_enabled = default_enabled + self.sub_code_of = sub_code_of + if sub_code_of is not None: + assert sub_code_of.sub_code_of is None, "Nested subcategories are not supported" + sub_code_map[sub_code_of.code].add(code) error_codes[code] = self def __str__(self) -> str: @@ -51,6 +62,12 @@ def __str__(self) -> str: ASSIGNMENT: Final[ErrorCode] = ErrorCode( "assignment", "Check that assigned value is compatible with target", "General" ) +METHOD_ASSIGN: Final[ErrorCode] = ErrorCode( + "method-assign", + "Check that assignment target is not a method", + "General", + sub_code_of=ASSIGNMENT, +) TYPE_ARG: Final = ErrorCode("type-arg", "Check that generic type arguments are present", "General") TYPE_VAR: Final = ErrorCode("type-var", "Check that type variable values are valid", "General") UNION_ATTR: Final = ErrorCode( diff --git a/mypy/errors.py b/mypy/errors.py index d1e13ad701fc..7cc0c5764861 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -586,7 +586,11 @@ def is_ignored_error(self, line: int, info: ErrorInfo, ignores: dict[int, list[s # Empty list means that we ignore all errors return True if info.code and self.is_error_code_enabled(info.code): - return info.code.code in ignores[line] + return ( + info.code.code in ignores[line] + or info.code.sub_code_of is not None + and info.code.sub_code_of.code in ignores[line] + ) return False def is_error_code_enabled(self, error_code: ErrorCode) -> bool: @@ -601,6 +605,8 @@ def is_error_code_enabled(self, error_code: ErrorCode) -> bool: return False elif error_code in current_mod_enabled: return True + elif error_code.sub_code_of is not None and error_code.sub_code_of in current_mod_disabled: + return False else: return error_code.default_enabled @@ -641,6 +647,10 @@ def generate_unused_ignore_errors(self, file: str) -> None: if len(ignored_codes) > 1 and len(unused_ignored_codes) > 0: unused_codes_message = f"[{', '.join(sorted(unused_ignored_codes))}]" message = f'Unused "type: ignore{unused_codes_message}" comment' + for unused in unused_ignored_codes: + narrower = set(used_ignored_codes) & codes.sub_code_map[unused] + if narrower: + message += f", use narrower [{', '.join(narrower)}] instead of [{unused}]" # Don't use report since add_error_info will ignore the error! 
info = ErrorInfo( self.import_context(), diff --git a/mypy/messages.py b/mypy/messages.py index a5fd09493456..23b6f7c0e991 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1402,7 +1402,7 @@ def base_class_definitions_incompatible( ) def cant_assign_to_method(self, context: Context) -> None: - self.fail(message_registry.CANNOT_ASSIGN_TO_METHOD, context, code=codes.ASSIGNMENT) + self.fail(message_registry.CANNOT_ASSIGN_TO_METHOD, context, code=codes.METHOD_ASSIGN) def cant_assign_to_classvar(self, name: str, context: Context) -> None: self.fail(f'Cannot assign to class variable "{name}" via instance', context) diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 8c6a446d101e..6e848e6a1e39 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -672,7 +672,7 @@ class A: def g(self: A) -> None: pass -A.f = g # E: Cannot assign to a method [assignment] +A.f = g # E: Cannot assign to a method [method-assign] [case testErrorCodeDefinedHereNoteIgnore] import m @@ -1006,3 +1006,35 @@ def f(): # flags: --disable-error-code=annotation-unchecked def f(): x: int = "no" # No warning here + +[case testMethodAssignmentSuppressed] +# flags: --disable-error-code=method-assign +class A: + def f(self) -> None: pass + def g(self) -> None: pass + +def h(self: A) -> None: pass + +A.f = h +# This actually works at runtime, but there is no way to express this in current type system +A.f = A().g # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "Callable[[A], None]") [assignment] + +[case testMethodAssignCoveredByAssignmentIgnore] +class A: + def f(self) -> None: pass +def h(self: A) -> None: pass +A.f = h # type: ignore[assignment] + +[case testMethodAssignCoveredByAssignmentFlag] +# flags: --disable-error-code=assignment +class A: + def f(self) -> None: pass +def h(self: A) -> None: pass +A.f = h # OK + +[case testMethodAssignCoveredByAssignmentUnused] +# flags: --warn-unused-ignores +class A: + def f(self) -> None: pass +def h(self: A) -> None: pass +A.f = h # type: ignore[assignment] # E: Unused "type: ignore" comment, use narrower [method-assign] instead of [assignment] From 6787e51f8fa21a5740995c5eb8a29b6c59464767 Mon Sep 17 00:00:00 2001 From: "Yilei \"Dolee\" Yang" Date: Sun, 5 Feb 2023 23:43:27 -0800 Subject: [PATCH 253/292] Fix a few typos in mypyc's comments and docstrings (#14617) --- mypyc/codegen/emitclass.py | 2 +- mypyc/codegen/emitfunc.py | 2 +- mypyc/common.py | 2 +- mypyc/doc/using_type_annotations.rst | 2 +- mypyc/irbuild/function.py | 4 ++-- mypyc/irbuild/ll_builder.py | 2 +- mypyc/lib-rt/dict_ops.c | 2 +- mypyc/lib-rt/str_ops.c | 2 +- mypyc/test/test_run.py | 2 +- 9 files changed, 10 insertions(+), 10 deletions(-) diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 79fdd9103371..15935c3b79f2 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -333,7 +333,7 @@ def emit_line() -> None: flags.append("_Py_TPFLAGS_HAVE_VECTORCALL") if not fields.get("tp_vectorcall"): # This is just a placeholder to please CPython. It will be - # overriden during setup. + # overridden during setup. 
fields["tp_call"] = "PyVectorcall_Call" fields["tp_flags"] = " | ".join(flags) diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 56a22447eeac..e7fb7db80413 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -432,7 +432,7 @@ def visit_set_attr(self, op: SetAttr) -> None: # ...and struct access for normal attributes. attr_expr = self.get_attr_expr(obj, op, decl_cl) if not op.is_init and attr_rtype.is_refcounted: - # This is not an initalization (where we know that the attribute was + # This is not an initialization (where we know that the attribute was # previously undefined), so decref the old value. always_defined = cl.is_always_defined(op.attr) if not always_defined: diff --git a/mypyc/common.py b/mypyc/common.py index 7412ebef4752..c8da5ff63bab 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -56,7 +56,7 @@ MAX_LITERAL_SHORT_INT: Final = MAX_SHORT_INT MIN_LITERAL_SHORT_INT: Final = -MAX_LITERAL_SHORT_INT - 1 -# Decription of the C type used to track the definedness of attributes and +# Description of the C type used to track the definedness of attributes and # the presence of argument default values that have types with overlapping # error values. Each tracked attribute/argument has a dedicated bit in the # relevant bitmap. diff --git a/mypyc/doc/using_type_annotations.rst b/mypyc/doc/using_type_annotations.rst index be596fc23210..a01246ab0914 100644 --- a/mypyc/doc/using_type_annotations.rst +++ b/mypyc/doc/using_type_annotations.rst @@ -304,7 +304,7 @@ Example:: def example() -> None: # A small integer uses the value (unboxed) representation x = 5 - # A large integer the the heap (boxed) representation + # A large integer uses the heap (boxed) representation x = 2**500 # Lists always contain boxed integers a = [55] diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index 5262b74e2853..02155d70e928 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -123,7 +123,7 @@ def transform_decorator(builder: IRBuilder, dec: Decorator) -> None: # if this is a registered singledispatch implementation with no other decorators), we should # treat this function as a regular function, not a decorated function elif dec.func in builder.fdefs_to_decorators: - # Obtain the the function name in order to construct the name of the helper function. + # Obtain the function name in order to construct the name of the helper function. name = dec.func.fullname.split(".")[-1] # Load the callable object representing the non-decorated function, and decorate it. @@ -397,7 +397,7 @@ def handle_ext_method(builder: IRBuilder, cdef: ClassDef, fdef: FuncDef) -> None builder.functions.append(func_ir) if is_decorated(builder, fdef): - # Obtain the the function name in order to construct the name of the helper function. + # Obtain the function name in order to construct the name of the helper function. _, _, name = fdef.fullname.rpartition(".") # Read the PyTypeObject representing the class, get the callable object # representing the non-decorated method diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 691f4729e4a4..2391ccc4d0ed 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -1386,7 +1386,7 @@ def compare_tagged_condition( ) -> None: """Compare two tagged integers using given operator (conditional context). - Assume lhs and and rhs are tagged integers. + Assume lhs and rhs are tagged integers. 
Args: lhs: Left operand diff --git a/mypyc/lib-rt/dict_ops.c b/mypyc/lib-rt/dict_ops.c index ba565257fd72..c0cc8d5a7f87 100644 --- a/mypyc/lib-rt/dict_ops.c +++ b/mypyc/lib-rt/dict_ops.c @@ -89,7 +89,7 @@ PyObject *CPyDict_SetDefaultWithEmptyDatatype(PyObject *dict, PyObject *key, int data_type) { PyObject *res = CPyDict_GetItem(dict, key); if (!res) { - // CPyDict_GetItem() would generates an PyExc_KeyError + // CPyDict_GetItem() would generates a PyExc_KeyError // when key is not found. PyErr_Clear(); diff --git a/mypyc/lib-rt/str_ops.c b/mypyc/lib-rt/str_ops.c index 3c0d275fbe39..90b19001f8f0 100644 --- a/mypyc/lib-rt/str_ops.c +++ b/mypyc/lib-rt/str_ops.c @@ -188,7 +188,7 @@ PyObject *CPyStr_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { return CPyObject_GetSlice(obj, start, end); } -/* Check if the given string is true (i.e. it's length isn't zero) */ +/* Check if the given string is true (i.e. its length isn't zero) */ bool CPyStr_IsTrue(PyObject *obj) { Py_ssize_t length = PyUnicode_GET_LENGTH(obj); return length != 0; diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index c867c9d37dac..6a5ab87fca49 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -255,7 +255,7 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> assert False, "Compile error" # Check that serialization works on this IR. (Only on the first - # step because the the returned ir only includes updated code.) + # step because the returned ir only includes updated code.) if incremental_step == 1: check_serialization_roundtrip(ir) From dc034786fdfdf5bf4f32c4cc5cf3aff3fc62c11a Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 6 Feb 2023 19:28:36 +0000 Subject: [PATCH 254/292] [mypyc] Enable native integers outside tests (#14606) I think that native integers work well enough to enable them outside tests. Require a more recent `mypy_extensions` that includes native int types, including `i64` and `i32`. Add definitions of `i64` and `i32` to the bundled stubs for `mypy_extensions`. Fork the stubs, since the definitions only make sense for mypy/mypyc. They require custom type checking logic. Other tools can treat these as aliases to `int`, which was implemented here: https://github.com/python/typeshed/pull/9675 Also fix serialization of native int TypeInfos. Since we patch `builtins.int` when we process `mypy_extensions`, the patched information may not be serialized. We'll also need to perform similar patching during deserialization. Here is the performance impact to some benchmarks when using `i64` instead of `int` types (these assume some additional tweaks that should be ready soon): * richards: 33% faster * hexiom: 18% faster * deltablue: 2.6% faster Perhaps more importantly, native integers help with upcoming low-level features, such as packed arrays. Closes mypyc/mypyc#837. Remaining work can be tracked in separate issues. 
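To make the new behaviour concrete, here is a small usage sketch (it mirrors the `testNarrowTypeForDictKeys`-style pythoneval case added below, `testNativeIntTypes`, rather than adding anything new): `i64` values coerce implicitly to and from `int`, the constructor accepts the same arguments as `int()`, and mixing different native int types is rejected.

```python
from mypy_extensions import i64, i32

x: i64 = 0         # implicit coercion from an int literal
y: int = x         # implicit coercion back to int
x = i64("ab", 16)  # explicit conversion, same signature as int()

i64(1) + i32(2)    # error: Unsupported operand types for + ("i64" and "i32")
```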
--- misc/sync-typeshed.py | 14 +--- mypy-requirements.txt | 2 +- mypy/checkexpr.py | 5 +- mypy/fixup.py | 7 ++ mypy/meet.py | 6 +- mypy/nodes.py | 8 +- mypy/semanal_classprop.py | 2 +- mypy/subtypes.py | 2 +- .../stubs/mypy-extensions/METADATA.toml | 2 +- .../stubs/mypy-extensions/mypy_extensions.pyi | 82 ++++++++++++++++++- mypyc/test-data/run-i32.test | 4 +- mypyc/test-data/run-i64.test | 22 ++--- pyproject.toml | 2 +- setup.py | 2 +- test-data/unit/check-incremental.test | 13 +++ test-data/unit/pythoneval.test | 24 ++++++ 16 files changed, 154 insertions(+), 43 deletions(-) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 98f94bbccd8b..5981b6b8fd0c 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -35,10 +35,13 @@ def check_state() -> None: def update_typeshed(typeshed_dir: str, commit: str | None) -> str: """Update contents of local typeshed copy. + We maintain our own separate mypy_extensions stubs, since it's + treated specially by mypy and we make assumptions about what's there. + We don't sync mypy_extensions stubs here -- this is done manually. + Return the normalized typeshed commit hash. """ assert os.path.isdir(os.path.join(typeshed_dir, "stdlib")) - assert os.path.isdir(os.path.join(typeshed_dir, "stubs")) if commit: subprocess.run(["git", "checkout", commit], check=True, cwd=typeshed_dir) commit = git_head_commit(typeshed_dir) @@ -48,15 +51,6 @@ def update_typeshed(typeshed_dir: str, commit: str | None) -> str: shutil.rmtree(stdlib_dir) # Copy new stdlib stubs. shutil.copytree(os.path.join(typeshed_dir, "stdlib"), stdlib_dir) - # Copy mypy_extensions stubs. We don't want to use a stub package, since it's - # treated specially by mypy and we make assumptions about what's there. - stubs_dir = os.path.join("mypy", "typeshed", "stubs") - shutil.rmtree(stubs_dir) - os.makedirs(stubs_dir) - shutil.copytree( - os.path.join(typeshed_dir, "stubs", "mypy-extensions"), - os.path.join(stubs_dir, "mypy-extensions"), - ) shutil.copy(os.path.join(typeshed_dir, "LICENSE"), os.path.join("mypy", "typeshed")) return commit diff --git a/mypy-requirements.txt b/mypy-requirements.txt index ee5fe5d295b8..9a55446eb05a 100644 --- a/mypy-requirements.txt +++ b/mypy-requirements.txt @@ -1,5 +1,5 @@ # NOTE: this needs to be kept in sync with the "requires" list in pyproject.toml typing_extensions>=3.10 -mypy_extensions>=0.4.3 +mypy_extensions>=1.0.0 typed_ast>=1.4.0,<2; python_version<'3.8' tomli>=1.1.0; python_version<'3.11' diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 2a04aeddb634..569928fbd014 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3354,7 +3354,10 @@ def lookup_definer(typ: Instance, attr_name: str) -> str | None: is_subtype(right_type, left_type) and isinstance(left_type, Instance) and isinstance(right_type, Instance) - and left_type.type.alt_promote is not right_type.type + and not ( + left_type.type.alt_promote is not None + and left_type.type.alt_promote.type is right_type.type + ) and lookup_definer(left_type, op_name) != lookup_definer(right_type, rev_op_name) ): # When we do "A() + B()" where B is a subclass of A, we'll actually try calling diff --git a/mypy/fixup.py b/mypy/fixup.py index 3593e4faa184..5f76cc1d1487 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -87,6 +87,13 @@ def visit_type_info(self, info: TypeInfo) -> None: info.declared_metaclass.accept(self.type_fixer) if info.metaclass_type: info.metaclass_type.accept(self.type_fixer) + if info.alt_promote: + info.alt_promote.accept(self.type_fixer) + instance = 
Instance(info, []) + # Hack: We may also need to add a backwards promotion (from int to native int), + # since it might not be serialized. + if instance not in info.alt_promote.type._promote: + info.alt_promote.type._promote.append(instance) if info._mro_refs: info.mro = [ lookup_fully_qualified_typeinfo( diff --git a/mypy/meet.py b/mypy/meet.py index 1cc125f3bfd6..d99e1a92d2eb 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -167,7 +167,7 @@ def narrow_declared_type(declared: Type, narrowed: Type) -> Type: if ( isinstance(narrowed, Instance) and narrowed.type.alt_promote - and narrowed.type.alt_promote is declared.type + and narrowed.type.alt_promote.type is declared.type ): # Special case: 'int' can't be narrowed down to a native int type such as # i64, since they have different runtime representations. @@ -715,10 +715,10 @@ def visit_instance(self, t: Instance) -> ProperType: return NoneType() else: alt_promote = t.type.alt_promote - if alt_promote and alt_promote is self.s.type: + if alt_promote and alt_promote.type is self.s.type: return t alt_promote = self.s.type.alt_promote - if alt_promote and alt_promote is t.type: + if alt_promote and alt_promote.type is t.type: return self.s if is_subtype(t, self.s): return t diff --git a/mypy/nodes.py b/mypy/nodes.py index 98976f4fe56a..72350c8d9925 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2932,7 +2932,7 @@ class is generic then it will be a type constructor of higher kind. # This results in some unintuitive results, such as that even # though i64 is compatible with int and int is compatible with # float, i64 is *not* compatible with float. - alt_promote: TypeInfo | None + alt_promote: mypy.types.Instance | None # Representation of a Tuple[...] base class, if the class has any # (e.g., for named tuples). If this is not None, the actual Type @@ -3230,6 +3230,7 @@ def serialize(self) -> JsonDict: "bases": [b.serialize() for b in self.bases], "mro": [c.fullname for c in self.mro], "_promote": [p.serialize() for p in self._promote], + "alt_promote": None if self.alt_promote is None else self.alt_promote.serialize(), "declared_metaclass": ( None if self.declared_metaclass is None else self.declared_metaclass.serialize() ), @@ -3266,6 +3267,11 @@ def deserialize(cls, data: JsonDict) -> TypeInfo: assert isinstance(t, mypy.types.ProperType) _promote.append(t) ti._promote = _promote + ti.alt_promote = ( + None + if data["alt_promote"] is None + else mypy.types.Instance.deserialize(data["alt_promote"]) + ) ti.declared_metaclass = ( None if data["declared_metaclass"] is None diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index ead80aed67b6..3f5bc9c4c2de 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -181,6 +181,6 @@ def add_type_promotion( int_sym = builtin_names["int"] assert isinstance(int_sym.node, TypeInfo) int_sym.node._promote.append(Instance(defn.info, [])) - defn.info.alt_promote = int_sym.node + defn.info.alt_promote = Instance(int_sym.node, []) if promote_targets: defn.info._promote.extend(promote_targets) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 9b555480e59b..c3d5517d43dd 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -455,7 +455,7 @@ def visit_instance(self, left: Instance) -> bool: # Special case: Low-level integer types are compatible with 'int'. We can't # use promotions, since 'int' is already promoted to low-level integer types, # and we can't have circular promotions. 
- if left.type.alt_promote is right.type: + if left.type.alt_promote and left.type.alt_promote.type is right.type: return True rname = right.type.fullname # Always try a nominal check if possible, diff --git a/mypy/typeshed/stubs/mypy-extensions/METADATA.toml b/mypy/typeshed/stubs/mypy-extensions/METADATA.toml index de6579f75d05..516f11f6b9e2 100644 --- a/mypy/typeshed/stubs/mypy-extensions/METADATA.toml +++ b/mypy/typeshed/stubs/mypy-extensions/METADATA.toml @@ -1,4 +1,4 @@ -version = "0.4.*" +version = "1.0.*" [tool.stubtest] ignore_missing_stub = false diff --git a/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi b/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi index 47547942b2e7..40e24645fb77 100644 --- a/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi +++ b/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi @@ -1,10 +1,14 @@ +# These stubs are forked from typeshed, since we use some definitions that only make +# sense in the context of mypy/mypyc (in particular, native int types such as i64). + import abc import sys from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Self from collections.abc import Mapping -from typing import Any, ClassVar, Generic, TypeVar, overload, type_check_only -from typing_extensions import Never +from typing import Any, ClassVar, Generic, SupportsInt, TypeVar, overload, type_check_only +from typing_extensions import Never, SupportsIndex +from _typeshed import ReadableBuffer, SupportsTrunc _T = TypeVar("_T") _U = TypeVar("_U") @@ -68,3 +72,77 @@ def trait(cls: _T) -> _T: ... def mypyc_attr(*attrs: str, **kwattrs: object) -> IdentityFunction: ... class FlexibleAlias(Generic[_T, _U]): ... + +# Native int types such as i64 are magical and support implicit +# coercions to/from int using special logic in mypy. We generally only +# include operations here for which we have specialized primitives. + +class i64: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> i64: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> i64: ... + + def __add__(self, x: i64) -> i64: ... + def __radd__(self, x: i64) -> i64: ... + def __sub__(self, x: i64) -> i64: ... + def __rsub__(self, x: i64) -> i64: ... + def __mul__(self, x: i64) -> i64: ... + def __rmul__(self, x: i64) -> i64: ... + def __floordiv__(self, x: i64) -> i64: ... + def __rfloordiv__(self, x: i64) -> i64: ... + def __mod__(self, x: i64) -> i64: ... + def __rmod__(self, x: i64) -> i64: ... + def __and__(self, x: i64) -> i64: ... + def __rand__(self, x: i64) -> i64: ... + def __or__(self, x: i64) -> i64: ... + def __ror__(self, x: i64) -> i64: ... + def __xor__(self, x: i64) -> i64: ... + def __rxor__(self, x: i64) -> i64: ... + def __lshift__(self, x: i64) -> i64: ... + def __rlshift__(self, x: i64) -> i64: ... + def __rshift__(self, x: i64) -> i64: ... + def __rrshift__(self, x: i64) -> i64: ... + def __neg__(self) -> i64: ... + def __invert__(self) -> i64: ... + def __pos__(self) -> i64: ... + def __lt__(self, x: i64) -> bool: ... + def __le__(self, x: i64) -> bool: ... + def __ge__(self, x: i64) -> bool: ... + def __gt__(self, x: i64) -> bool: ... + def __index__(self) -> int: ... + +class i32: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> i32: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> i32: ... 
+ + def __add__(self, x: i32) -> i32: ... + def __radd__(self, x: i32) -> i32: ... + def __sub__(self, x: i32) -> i32: ... + def __rsub__(self, x: i32) -> i32: ... + def __mul__(self, x: i32) -> i32: ... + def __rmul__(self, x: i32) -> i32: ... + def __floordiv__(self, x: i32) -> i32: ... + def __rfloordiv__(self, x: i32) -> i32: ... + def __mod__(self, x: i32) -> i32: ... + def __rmod__(self, x: i32) -> i32: ... + def __and__(self, x: i32) -> i32: ... + def __rand__(self, x: i32) -> i32: ... + def __or__(self, x: i32) -> i32: ... + def __ror__(self, x: i32) -> i32: ... + def __xor__(self, x: i32) -> i32: ... + def __rxor__(self, x: i32) -> i32: ... + def __lshift__(self, x: i32) -> i32: ... + def __rlshift__(self, x: i32) -> i32: ... + def __rshift__(self, x: i32) -> i32: ... + def __rrshift__(self, x: i32) -> i32: ... + def __neg__(self) -> i32: ... + def __invert__(self) -> i32: ... + def __pos__(self) -> i32: ... + def __lt__(self, x: i32) -> bool: ... + def __le__(self, x: i32) -> bool: ... + def __ge__(self, x: i32) -> bool: ... + def __gt__(self, x: i32) -> bool: ... + def __index__(self) -> int: ... diff --git a/mypyc/test-data/run-i32.test b/mypyc/test-data/run-i32.test index 384e6bd4f02c..af99fb79d35e 100644 --- a/mypyc/test-data/run-i32.test +++ b/mypyc/test-data/run-i32.test @@ -1,9 +1,7 @@ [case testI32BasicOps] from typing import Any, Tuple -MYPY = False -if MYPY: - from mypy_extensions import i32, i64 +from mypy_extensions import i32, i64 from testutil import assertRaises diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index ea94741dbd51..cd4ac19532d2 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -1,9 +1,7 @@ [case testI64BasicOps] from typing import List, Any, Tuple, Union -MYPY = False -if MYPY: - from mypy_extensions import i64, i32 +from mypy_extensions import i64, i32 from testutil import assertRaises @@ -517,13 +515,9 @@ def test_isinstance() -> None: from typing import Any, Tuple import sys -from mypy_extensions import mypyc_attr +from mypy_extensions import mypyc_attr, i64 from typing_extensions import Final -MYPY = False -if MYPY: - from mypy_extensions import i64 - from testutil import assertRaises def maybe_raise(n: i64, error: bool) -> i64: @@ -911,9 +905,7 @@ from typing_extensions import Final MAGIC: Final = -113 -MYPY = False -if MYPY: - from mypy_extensions import i64 +from mypy_extensions import i64 def f(x: i64, y: i64 = 5) -> i64: return x + y @@ -1211,9 +1203,7 @@ def test_magic_default() -> None: [case testI64UndefinedLocal] from typing_extensions import Final -MYPY = False -if MYPY: - from mypy_extensions import i64, i32 +from mypy_extensions import i64, i32 from testutil import assertRaises @@ -1346,9 +1336,7 @@ def test_many_locals() -> None: from typing import Any from typing_extensions import Final -MYPY = False -if MYPY: - from mypy_extensions import i64, trait +from mypy_extensions import i64, trait from testutil import assertRaises diff --git a/pyproject.toml b/pyproject.toml index 1348b9463639..328b9bf159a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ requires = [ "wheel >= 0.30.0", # the following is from mypy-requirements.txt "typing_extensions>=3.10", - "mypy_extensions>=0.4.3", + "mypy_extensions>=1.0.0", "typed_ast>=1.4.0,<2; python_version<'3.8'", "tomli>=1.1.0; python_version<'3.11'", # the following is from build-requirements.txt diff --git a/setup.py b/setup.py index a148237f0b95..516a639f3bb2 100644 --- a/setup.py +++ b/setup.py @@ -213,7 +213,7 @@ def 
run(self): install_requires=[ "typed_ast >= 1.4.0, < 2; python_version<'3.8'", "typing_extensions>=3.10", - "mypy_extensions >= 0.4.3", + "mypy_extensions >= 1.0.0", "tomli>=1.1.0; python_version<'3.11'", ], # Same here. diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index fed16bc683e2..93d136936003 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6359,3 +6359,16 @@ from m import Foo [file m.py] from missing_module import Meta # type: ignore[import] class Foo(metaclass=Meta): ... + +[case testIncrementalNativeInt] +import a +[file a.py] +from mypy_extensions import i64 +x: i64 = 0 +[file a.py.2] +from mypy_extensions import i64 +x: i64 = 0 +y: int = x +[builtins fixtures/tuple.pyi] +[out] +[out2] diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index b414eba9f679..9197e2f97367 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1877,3 +1877,27 @@ for value in enum_iter(socket.SocketKind): _testEnumIterMetaInference.py:8: note: Revealed type is "typing.Iterator[_E`-1]" _testEnumIterMetaInference.py:9: note: Revealed type is "_E`-1" _testEnumIterMetaInference.py:13: note: Revealed type is "socket.SocketKind" + +[case testNativeIntTypes] +# Spot check various native int operations with full stubs. +from mypy_extensions import i64, i32 + +x: i64 = 0 +y: int = x +x = i64(0) +y = int(x) +i64() +i64("12") +i64("ab", 16) +i64(1.2) +float(i64(1)) + +i64(1) + i32(2) # Error +reveal_type(x + y) +reveal_type(y + x) +a = [0] +a[x] +[out] +_testNativeIntTypes.py:14: error: Unsupported operand types for + ("i64" and "i32") +_testNativeIntTypes.py:15: note: Revealed type is "mypy_extensions.i64" +_testNativeIntTypes.py:16: note: Revealed type is "mypy_extensions.i64" From 725214b6bb75e2dd4a01a21ed707c311d15d7bd3 Mon Sep 17 00:00:00 2001 From: hamdanal <93259987+hamdanal@users.noreply.github.com> Date: Tue, 7 Feb 2023 07:50:34 +0100 Subject: [PATCH 255/292] stubgen: preserve PEP 604 Unions in generated pyi files (#14601) When a PEP 604 Union exists in the runtime, stubgen was generating a `Union[...]` syntax without importing `Union` from `typing`. With this change, stubgen preserves the ` | `-unions in the output. Fixes #12929 Closes #13428 Ref #12920 --- mypy/stubgen.py | 4 ++++ test-data/unit/stubgen.test | 10 ++++++++++ 2 files changed, 14 insertions(+) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 51ee1b93de14..6cb4669887fe 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -134,6 +134,7 @@ TypeList, TypeStrVisitor, UnboundType, + UnionType, get_proper_type, ) from mypy.visitor import NodeVisitor @@ -326,6 +327,9 @@ def visit_none_type(self, t: NoneType) -> str: def visit_type_list(self, t: TypeList) -> str: return f"[{self.list_str(t.items)}]" + def visit_union_type(self, t: UnionType) -> str: + return " | ".join([item.accept(self) for item in t.items]) + def args_str(self, args: Iterable[Type]) -> str: """Convert an array of arguments to strings and join the results with commas. diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 4909c0005412..8e4285b7de2e 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2783,3 +2783,13 @@ T = TypeVar("T", bound=str | None) from typing import TypeVar T = TypeVar('T', bound=str | None) + + +[case testPEP604UnionType] +a: str | int + +def f(x: str | None) -> None: ... +[out] +a: str | int + +def f(x: str | None) -> None: ... 
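As a before/after illustration of this stubgen change (based on the new `testPEP604UnionType` case above; the module name is made up): for a source module whose annotations use PEP 604 syntax at runtime, the generated stub now keeps the ` | ` form instead of emitting `Union[...]` without a corresponding import.

```python
# example.py -- input handed to stubgen
a: str | int

def f(x: str | None) -> None: ...

# Generated example.pyi after this change (previously the annotations came out
# as Union[str, int] / Union[str, None] with no "from typing import Union"):
#
#     a: str | int
#
#     def f(x: str | None) -> None: ...
```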
From 8cc024e0b721f159caee2faf9377cc1c9a1997fe Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Tue, 7 Feb 2023 06:19:47 -0800 Subject: [PATCH 256/292] update upload-pypi script to the new versioning scheme (#14625) When uploading 1.0.0, I realized that this script needs updating. --- misc/upload-pypi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py index e60ec3cca207..9d8827c5e46c 100644 --- a/misc/upload-pypi.py +++ b/misc/upload-pypi.py @@ -119,7 +119,7 @@ def upload_dist(dist: Path, dry_run: bool = True) -> None: def upload_to_pypi(version: str, dry_run: bool = True) -> None: - assert re.match(r"v?0\.[0-9]{3}(\+\S+)?$", version) + assert re.match(r"v?[1-9]\.[0-9]+\.[0-9](\+\S+)?$", version) if "dev" in version: assert dry_run, "Must use --dry-run with dev versions of mypy" if version.startswith("v"): From 11c63aa6aa50ec4b9b71c57631e4813c65773e85 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 7 Feb 2023 16:39:13 +0000 Subject: [PATCH 257/292] Consistently use type-abstract error code (#14619) Ref #4717 Although function use case is much more important, the variable assignment should use the same error code, otherwise this may cause confusion. --- mypy/errors.py | 2 +- mypy/messages.py | 4 +++- test-data/unit/check-errorcodes.test | 5 +++++ 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/mypy/errors.py b/mypy/errors.py index 7cc0c5764861..ee1fa137dfe4 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -24,7 +24,7 @@ # Keep track of the original error code when the error code of a message is changed. # This is used to give notes about out-of-date "type: ignore" comments. -original_error_codes: Final = {codes.LITERAL_REQ: codes.MISC} +original_error_codes: Final = {codes.LITERAL_REQ: codes.MISC, codes.TYPE_ABSTRACT: codes.MISC} class ErrorInfo: diff --git a/mypy/messages.py b/mypy/messages.py index 23b6f7c0e991..aefe65f8de09 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1787,7 +1787,9 @@ def bad_proto_variance( def concrete_only_assign(self, typ: Type, context: Context) -> None: self.fail( - f"Can only assign concrete classes to a variable of type {format_type(typ)}", context + f"Can only assign concrete classes to a variable of type {format_type(typ)}", + context, + code=codes.TYPE_ABSTRACT, ) def concrete_only_call(self, typ: Type, context: Context) -> None: diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 6e848e6a1e39..8bf12eca1f59 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -998,6 +998,11 @@ T = TypeVar("T") def test(tp: Type[T]) -> T: ... test(C) # E: Only concrete class can be given where "Type[C]" is expected [type-abstract] +class D(C): + @abc.abstractmethod + def bar(self) -> None: ... +cls: Type[C] = D # E: Can only assign concrete classes to a variable of type "Type[C]" [type-abstract] + [case testUncheckedAnnotationCodeShown] def f(): x: int = "no" # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs [annotation-unchecked] From 8d93b67fa19d53dfef5a7af6f5c1b8b2ee76d4a6 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 7 Feb 2023 16:48:28 +0000 Subject: [PATCH 258/292] Make typeddict-unknown-key sub-code of typeddict-item (#14620) The PR that added the error code didn't make into 1.0, so I make it a sub-code, to improve backwards compatibility. Also fixing a type while I am touching this code. 
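Concretely (this mirrors the `testErrorCodeTypedDictSubCodeIgnore` case added below): because `typeddict-unknown-key` is now a sub-code of `typeddict-item`, code that already ignores the wider code keeps type checking cleanly.

```python
from typing_extensions import TypedDict

class D(TypedDict):
    x: int

# The unknown key "y" is reported under [typeddict-unknown-key], but since that
# is a sub-code of [typeddict-item], the existing broader ignore still applies:
d: D = {'x': 1, 'y': 2}  # type: ignore[typeddict-item]
```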
--- docs/source/error_code_list.rst | 3 +++ mypy/errorcodes.py | 7 +++++-- mypy/messages.py | 4 ++-- test-data/unit/check-errorcodes.test | 8 ++++++++ 4 files changed, 18 insertions(+), 4 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index dd049ee8bbdf..4d402226a589 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -526,6 +526,9 @@ Whereas reading an unknown value will generate the more generic/serious # Error: TypedDict "Point" has no key "z" [typeddict-item] _ = a["z"] +.. note:: + + This error code is a sub-error code of a wider ``[typeddict-item]`` code. Check that type of target is known [has-type] --------------------------------------------- diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 8881a767d72e..3d8b1096ed4f 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -84,8 +84,11 @@ def __str__(self) -> str: TYPEDDICT_ITEM: Final = ErrorCode( "typeddict-item", "Check items when constructing TypedDict", "General" ) -TYPPEDICT_UNKNOWN_KEY: Final = ErrorCode( - "typeddict-unknown-key", "Check unknown keys when constructing TypedDict", "General" +TYPEDDICT_UNKNOWN_KEY: Final = ErrorCode( + "typeddict-unknown-key", + "Check unknown keys when constructing TypedDict", + "General", + sub_code_of=TYPEDDICT_ITEM, ) HAS_TYPE: Final = ErrorCode( "has-type", "Check that type of reference can be determined", "General" diff --git a/mypy/messages.py b/mypy/messages.py index aefe65f8de09..7716e1323e9f 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1652,7 +1652,7 @@ def unexpected_typeddict_keys( format_key_list(extra, short=True), format_type(typ) ), context, - code=codes.TYPPEDICT_UNKNOWN_KEY, + code=codes.TYPEDDICT_UNKNOWN_KEY, ) if missing or extra: # No need to check for further errors @@ -1693,7 +1693,7 @@ def typeddict_key_not_found( context, ) else: - err_code = codes.TYPPEDICT_UNKNOWN_KEY if setitem else codes.TYPEDDICT_ITEM + err_code = codes.TYPEDDICT_UNKNOWN_KEY if setitem else codes.TYPEDDICT_ITEM self.fail( f'TypedDict {format_type(typ)} has no key "{item_name}"', context, code=err_code ) diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 8bf12eca1f59..8b3567ab7cf6 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -481,6 +481,14 @@ not_exist = a['not_exist'] # type: ignore[typeddict-item] [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] +[case testErrorCodeTypedDictSubCodeIgnore] +from typing_extensions import TypedDict +class D(TypedDict): + x: int +d: D = {'x': 1, 'y': 2} # type: ignore[typeddict-item] +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + [case testErrorCodeCannotDetermineType] y = x # E: Cannot determine type of "x" [has-type] # E: Name "x" is used before definition [used-before-def] reveal_type(y) # N: Revealed type is "Any" From 35b2926adc5aad67ba6e07d4c7c5d91daffb286b Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 7 Feb 2023 18:18:14 +0000 Subject: [PATCH 259/292] Consistently use literal-required error code for TypedDicts (#14621) Ref #7178 This code is used for some TypedDict errors, but `misc` was used for others. I make it more consistent. Also this code looks undocumented, so I add some basic docs. 
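For example (a sketch of the kind of code affected, not an exhaustive list): TypedDict operations keyed by a plain `str` variable, such as `pop()`, `setdefault()` and `del`, now report `[literal-required]` like plain subscripting already does, instead of falling back to `[misc]`.

```python
from typing_extensions import TypedDict

class Point(TypedDict):
    x: int
    y: int

def drop(p: Point, key: str) -> None:
    p[key]                 # error: ... [literal-required] (unchanged)
    p.pop(key)             # was reported as [misc], now [literal-required]
    p.setdefault(key, 0)   # was reported as [misc], now [literal-required]
    del p[key]             # was reported as [misc], now [literal-required]
```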
--- docs/source/error_code_list.rst | 29 +++++++++++++++++++++++++++++ mypy/checkexpr.py | 6 +++++- mypy/plugins/default.py | 19 ++++++++++++++++--- 3 files changed, 50 insertions(+), 4 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 4d402226a589..0388cd2165dd 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -804,6 +804,35 @@ consistently when using the call-based syntax. Example: # Error: First argument to namedtuple() should be "Point2D", not "Point" Point2D = NamedTuple("Point", [("x", int), ("y", int)]) +Check that literal is used where expected [literal-required] +------------------------------------------------------------ + +There are some places where only a (string) literal value is expected for +the purposes of static type checking, for example a ``TypedDict`` key, or +a ``__match_args__`` item. Providing a ``str``-valued variable in such contexts +will result in an error. Note however, in many cases you can use ``Final``, +or ``Literal`` variables, for example: + +.. code-block:: python + + from typing import Final, Literal, TypedDict + + class Point(TypedDict): + x: int + y: int + + def test(p: Point) -> None: + X: Final = "x" + p[X] # OK + + Y: Literal["y"] = "y" + p[Y] # OK + + key = "x" # Inferred type of key is `str` + # Error: TypedDict key must be a string literal; + # expected one of ("x", "y") [literal-required] + p[key] + Check that overloaded functions have an implementation [no-overload-impl] ------------------------------------------------------------------------- diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 569928fbd014..9992821f1b4a 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -724,7 +724,11 @@ def validate_typeddict_kwargs(self, kwargs: DictExpr) -> dict[str, Expression] | literal_value = values[0] if literal_value is None: key_context = item_name_expr or item_arg - self.chk.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, key_context) + self.chk.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + key_context, + code=codes.LITERAL_REQ, + ) return None else: item_names.append(literal_value) diff --git a/mypy/plugins/default.py b/mypy/plugins/default.py index 04971868e8f4..4d6f46860939 100644 --- a/mypy/plugins/default.py +++ b/mypy/plugins/default.py @@ -3,6 +3,7 @@ from functools import partial from typing import Callable +import mypy.errorcodes as codes from mypy import message_registry from mypy.nodes import DictExpr, IntExpr, StrExpr, UnaryExpr from mypy.plugin import ( @@ -264,7 +265,11 @@ def typed_dict_pop_callback(ctx: MethodContext) -> Type: ): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: - ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + ctx.context, + code=codes.LITERAL_REQ, + ) return AnyType(TypeOfAny.from_error) value_types = [] @@ -319,7 +324,11 @@ def typed_dict_setdefault_callback(ctx: MethodContext) -> Type: ): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: - ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + ctx.context, + code=codes.LITERAL_REQ, + ) return AnyType(TypeOfAny.from_error) default_type = ctx.arg_types[1][0] @@ -357,7 +366,11 @@ def typed_dict_delitem_callback(ctx: MethodContext) -> Type: ): keys = 
try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: - ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + ctx.context, + code=codes.LITERAL_REQ, + ) return AnyType(TypeOfAny.from_error) for key in keys: From f6a8037cccacfe60459d4e64b962d31388a7fd3e Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Tue, 7 Feb 2023 18:24:45 +0000 Subject: [PATCH 260/292] Adjust inconsistent dataclasses plugin error messages (#14637) This commit adds quotes around Python identifiers in two error messages, and points the error for `"eq" must be True if "order" is True` more directly at the decorator that triggers the error message. --- mypy/plugins/dataclasses.py | 4 ++-- test-data/unit/check-dataclass-transform.test | 4 ++-- test-data/unit/check-dataclasses.test | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 75496d5e56f9..6306b3a77ae9 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -229,7 +229,7 @@ def transform(self) -> bool: # Add <, >, <=, >=, but only if the class has an eq method. if decorator_arguments["order"]: if not decorator_arguments["eq"]: - ctx.api.fail("eq must be True if order is True", ctx.cls) + ctx.api.fail('"eq" must be True if "order" is True', ctx.reason) for method_name in ["__lt__", "__gt__", "__le__", "__ge__"]: # Like for __eq__ and __ne__, we want "other" to match @@ -247,7 +247,7 @@ def transform(self) -> bool: if existing_method is not None and not existing_method.plugin_generated: assert existing_method.node ctx.api.fail( - f"You may not have a custom {method_name} method when order=True", + f'You may not have a custom "{method_name}" method when "order" is True', existing_method.node, ) diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 1a25c087c5a6..00591d46f834 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -55,8 +55,8 @@ def my_dataclass(*, eq: bool, order: bool) -> Callable[[Type], Type]: return cls return transform -@my_dataclass(eq=False, order=True) -class Person: # E: eq must be True if order is True +@my_dataclass(eq=False, order=True) # E: "eq" must be True if "order" is True +class Person: name: str age: int diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 631a92f9963b..4d85be391186 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -672,8 +672,8 @@ app1 >= app3 # flags: --python-version 3.7 from dataclasses import dataclass -@dataclass(eq=False, order=True) -class Application: # E: eq must be True if order is True +@dataclass(eq=False, order=True) # E: "eq" must be True if "order" is True +class Application: ... [builtins fixtures/dataclasses.pyi] @@ -684,7 +684,7 @@ from dataclasses import dataclass @dataclass(order=True) class Application: - def __lt__(self, other: 'Application') -> bool: # E: You may not have a custom __lt__ method when order=True + def __lt__(self, other: 'Application') -> bool: # E: You may not have a custom "__lt__" method when "order" is True ... 
[builtins fixtures/dataclasses.pyi] From 891e035ff661deec96479e990a6a7695fdfe8af6 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 8 Feb 2023 01:11:33 +0000 Subject: [PATCH 261/292] Fix crash on star unpacking to underscore (#14624) Fixes #14250 This one is interesting. It looks like most likely this was caused by my PR https://github.com/python/mypy/pull/7127 that fixed other crash. After looking a bit more, `StarType` is something old, and should never by used. At least I didn't find `visit_star_type()` in any of the type visitors. Actually mypy already uses `assert False`, if we get to a non-special-cased star expression. Btw, I noticed that `pythoneval` test with empty expected output passes in case of a crash (at least on my machine), so I fix this too. --- mypy/checker.py | 8 +++----- mypy/checkexpr.py | 6 +++--- mypy/semanal.py | 8 +------- mypy/server/astmerge.py | 4 ---- mypy/test/testpythoneval.py | 4 ++++ mypy/type_visitor.py | 14 ++------------ mypy/typeanal.py | 16 +--------------- mypy/types.py | 29 ----------------------------- mypy/typetraverser.py | 4 ---- test-data/unit/pythoneval.test | 23 +++++++++++++++++++++++ 10 files changed, 37 insertions(+), 79 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index c9d2d3ede283..8e1de9a07b4c 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -192,7 +192,6 @@ Overloaded, PartialType, ProperType, - StarType, TupleType, Type, TypeAliasType, @@ -3288,7 +3287,7 @@ def check_assignment_to_multiple_lvalues( last_idx: int | None = None for idx_rval, rval in enumerate(rvalue.items): if isinstance(rval, StarExpr): - typs = get_proper_type(self.expr_checker.visit_star_expr(rval).type) + typs = get_proper_type(self.expr_checker.accept(rval.expr)) if isinstance(typs, TupleType): rvalues.extend([TempNode(typ) for typ in typs.items]) elif self.type_is_iterable(typs) and isinstance(typs, Instance): @@ -3311,7 +3310,7 @@ def check_assignment_to_multiple_lvalues( iterable_end: int | None = None for i, rval in enumerate(rvalues): if isinstance(rval, StarExpr): - typs = get_proper_type(self.expr_checker.visit_star_expr(rval).type) + typs = get_proper_type(self.expr_checker.accept(rval.expr)) if self.type_is_iterable(typs) and isinstance(typs, Instance): if iterable_start is None: iterable_start = i @@ -3674,8 +3673,7 @@ def check_lvalue(self, lvalue: Lvalue) -> tuple[Type | None, IndexExpr | None, V ] lvalue_type = TupleType(types, self.named_type("builtins.tuple")) elif isinstance(lvalue, StarExpr): - typ, _, _ = self.check_lvalue(lvalue.expr) - lvalue_type = StarType(typ) if typ else None + lvalue_type, _, _ = self.check_lvalue(lvalue.expr) else: lvalue_type = self.expr_checker.accept(lvalue) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 9992821f1b4a..4cfbd0811025 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -140,7 +140,6 @@ ParamSpecType, PartialType, ProperType, - StarType, TupleType, Type, TypeAliasType, @@ -5160,8 +5159,9 @@ def visit_typeddict_expr(self, e: TypedDictExpr) -> Type: def visit__promote_expr(self, e: PromoteExpr) -> Type: return e.type - def visit_star_expr(self, e: StarExpr) -> StarType: - return StarType(self.accept(e.expr)) + def visit_star_expr(self, e: StarExpr) -> Type: + # TODO: should this ever be called (see e.g. mypyc visitor)? 
+ return self.accept(e.expr) def object_type(self) -> Instance: """Return instance type 'object'.""" diff --git a/mypy/semanal.py b/mypy/semanal.py index 1256133cb5f3..ba5a6bc67647 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -257,7 +257,6 @@ ParamSpecType, PlaceholderType, ProperType, - StarType, TrivialSyntheticTypeTranslator, TupleType, Type, @@ -3873,8 +3872,6 @@ def check_lvalue_validity(self, node: Expression | SymbolNode | None, ctx: Conte self.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, ctx) def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: - if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr): - self.fail("Star type only allowed for starred expressions", lvalue) if isinstance(lvalue, RefExpr): lvalue.is_inferred_def = False if isinstance(lvalue.node, Var): @@ -3902,10 +3899,7 @@ def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: self.fail("Tuple type expected for multiple variables", lvalue) elif isinstance(lvalue, StarExpr): # Historical behavior for the old parser - if isinstance(typ, StarType): - self.store_declared_types(lvalue.expr, typ.type) - else: - self.store_declared_types(lvalue.expr, typ) + self.store_declared_types(lvalue.expr, typ) else: # This has been flagged elsewhere as an error, so just ignore here. pass diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 6ce737c42520..1ec6d572a82c 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -95,7 +95,6 @@ PartialType, PlaceholderType, RawExpressionType, - StarType, SyntheticTypeVisitor, TupleType, Type, @@ -519,9 +518,6 @@ def visit_callable_argument(self, typ: CallableArgument) -> None: def visit_ellipsis_type(self, typ: EllipsisType) -> None: pass - def visit_star_type(self, typ: StarType) -> None: - typ.type.accept(self) - def visit_uninhabited_type(self, typ: UninhabitedType) -> None: pass diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py index 6f937fee67b7..02dd11655382 100644 --- a/mypy/test/testpythoneval.py +++ b/mypy/test/testpythoneval.py @@ -81,6 +81,10 @@ def test_python_evaluation(testcase: DataDrivenTestCase, cache_dir: str) -> None # Normalize paths so that the output is the same on Windows and Linux/macOS. line = line.replace(test_temp_dir + os.sep, test_temp_dir + "/") output.append(line.rstrip("\r\n")) + if returncode > 1 and not testcase.output: + # Either api.run() doesn't work well in case of a crash, or pytest interferes with it. + # Tweak output to prevent tests with empty expected output to pass in case of a crash. + output.append("!!! Mypy crashed !!!") if returncode == 0 and not output: # Execute the program. proc = subprocess.run( diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index c5324357117b..5a5643f35c01 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -35,7 +35,6 @@ PartialType, PlaceholderType, RawExpressionType, - StarType, TupleType, Type, TypeAliasType, @@ -153,11 +152,8 @@ def visit_unpack_type(self, t: UnpackType) -> T: class SyntheticTypeVisitor(TypeVisitor[T]): """A TypeVisitor that also knows how to visit synthetic AST constructs. - Not just real types.""" - - @abstractmethod - def visit_star_type(self, t: StarType) -> T: - pass + Not just real types. 
+ """ @abstractmethod def visit_type_list(self, t: TypeList) -> T: @@ -386,9 +382,6 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> T: def visit_literal_type(self, t: LiteralType) -> T: return self.strategy([]) - def visit_star_type(self, t: StarType) -> T: - return t.type.accept(self) - def visit_union_type(self, t: UnionType) -> T: return self.query_types(t.items) @@ -529,9 +522,6 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> bool: def visit_literal_type(self, t: LiteralType) -> bool: return self.default - def visit_star_type(self, t: StarType) -> bool: - return t.type.accept(self) - def visit_union_type(self, t: UnionType) -> bool: return self.query_types(t.items) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 2cd136e53842..f3329af6207a 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -64,7 +64,6 @@ PlaceholderType, RawExpressionType, RequiredType, - StarType, SyntheticTypeVisitor, TrivialSyntheticTypeTranslator, TupleType, @@ -1031,17 +1030,7 @@ def visit_tuple_type(self, t: TupleType) -> Type: code=codes.SYNTAX, ) return AnyType(TypeOfAny.from_error) - star_count = sum(1 for item in t.items if isinstance(item, StarType)) - if star_count > 1: - self.fail("At most one star type allowed in a tuple", t) - if t.implicit: - return TupleType( - [AnyType(TypeOfAny.from_error) for _ in t.items], - self.named_type("builtins.tuple"), - t.line, - ) - else: - return AnyType(TypeOfAny.from_error) + any_type = AnyType(TypeOfAny.special_form) # If the fallback isn't filled in yet, its type will be the falsey FakeInfo fallback = ( @@ -1093,9 +1082,6 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> Type: def visit_literal_type(self, t: LiteralType) -> Type: return t - def visit_star_type(self, t: StarType) -> Type: - return StarType(self.anal_type(t.type), t.line) - def visit_union_type(self, t: UnionType) -> Type: if ( t.uses_pep604_syntax is True diff --git a/mypy/types.py b/mypy/types.py index 90d33839c693..6c036ccacecd 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2592,28 +2592,6 @@ def is_singleton_type(self) -> bool: return self.is_enum_literal() or isinstance(self.value, bool) -class StarType(ProperType): - """The star type *type_parameter. - - This is not a real type but a syntactic AST construct. 
- """ - - __slots__ = ("type",) - - type: Type - - def __init__(self, type: Type, line: int = -1, column: int = -1) -> None: - super().__init__(line, column) - self.type = type - - def accept(self, visitor: TypeVisitor[T]) -> T: - assert isinstance(visitor, SyntheticTypeVisitor) - return cast(T, visitor.visit_star_type(self)) - - def serialize(self) -> JsonDict: - assert False, "Synthetic types don't serialize" - - class UnionType(ProperType): """The union type Union[T1, ..., Tn] (at least one type argument).""" @@ -3185,10 +3163,6 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> str: def visit_literal_type(self, t: LiteralType) -> str: return f"Literal[{t.value_repr()}]" - def visit_star_type(self, t: StarType) -> str: - s = t.type.accept(self) - return f"*{s}" - def visit_union_type(self, t: UnionType) -> str: s = self.list_str(t.items) return f"Union[{s}]" @@ -3245,9 +3219,6 @@ def visit_ellipsis_type(self, t: EllipsisType) -> Type: def visit_raw_expression_type(self, t: RawExpressionType) -> Type: return t - def visit_star_type(self, t: StarType) -> Type: - return t - def visit_type_list(self, t: TypeList) -> Type: return t diff --git a/mypy/typetraverser.py b/mypy/typetraverser.py index 9c4a9157ad6a..d9ab54871f4a 100644 --- a/mypy/typetraverser.py +++ b/mypy/typetraverser.py @@ -20,7 +20,6 @@ PartialType, PlaceholderType, RawExpressionType, - StarType, SyntheticTypeVisitor, TupleType, Type, @@ -115,9 +114,6 @@ def visit_unbound_type(self, t: UnboundType) -> None: def visit_type_list(self, t: TypeList) -> None: self.traverse_types(t.items) - def visit_star_type(self, t: StarType) -> None: - t.type.accept(self) - def visit_ellipsis_type(self, t: EllipsisType) -> None: pass diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 9197e2f97367..fbbaecbba241 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1901,3 +1901,26 @@ a[x] _testNativeIntTypes.py:14: error: Unsupported operand types for + ("i64" and "i32") _testNativeIntTypes.py:15: note: Revealed type is "mypy_extensions.i64" _testNativeIntTypes.py:16: note: Revealed type is "mypy_extensions.i64" + +[case testStarUnpackNestedUnderscore] +from typing import Tuple, Dict, List + +def crash() -> None: + d: Dict[int, Tuple[str, int, str]] = {} + k, (v1, *_) = next(iter(d.items())) + +def test1() -> None: + vs: List[str] + d: Dict[int, Tuple[str, int, int]] = {} + k, (v1, *vs) = next(iter(d.items())) + reveal_type(vs) + +def test2() -> None: + d: Dict[int, Tuple[str, int, str]] = {} + k, (v1, *vs) = next(iter(d.items())) + reveal_type(vs) +[out] +_testStarUnpackNestedUnderscore.py:10: error: List item 0 has incompatible type "int"; expected "str" +_testStarUnpackNestedUnderscore.py:10: error: List item 1 has incompatible type "int"; expected "str" +_testStarUnpackNestedUnderscore.py:11: note: Revealed type is "builtins.list[builtins.str]" +_testStarUnpackNestedUnderscore.py:16: note: Revealed type is "builtins.list[builtins.object]" From f50561419fe21ca9f4cf64a52a0eddfc10f5ae74 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 8 Feb 2023 10:41:06 +0000 Subject: [PATCH 262/292] Fix mypy daemon docs link in README (#14644) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9d9618e6bc12..6c9f01968f92 100644 --- a/README.md +++ b/README.md @@ -116,7 +116,7 @@ Yusuke Miyazaki). 
If you are working with large code bases, you can run mypy in dmypy run -- PROGRAM [statically typed parts]: https://mypy.readthedocs.io/en/latest/getting_started.html#function-signatures-and-dynamic-vs-static-typing -[daemon-mode]: https://mypy.readthedocs.io/en/stable/mypy_daemon.html +[daemon mode]: https://mypy.readthedocs.io/en/stable/mypy_daemon.html Integrations From 9e85f9b862287f86498f5fb084a13504d09326fd Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Wed, 8 Feb 2023 10:49:29 +0000 Subject: [PATCH 263/292] [dataclass_transform] Support default parameters (#14580) PEP 681 defines several parameters for `typing.dataclass_transform`. This commit adds support for collecting these arguments and forwarding them to the dataclasses plugin. For this first iteration, only the `*_default` parameters are supported; `field_specifiers` will be implemented in a separate commit, since it is more complicated. --- mypy/nodes.py | 73 ++++++++-- mypy/plugins/dataclasses.py | 106 +++++++++----- mypy/semanal.py | 53 ++++--- mypy/semanal_main.py | 4 +- mypy/semanal_shared.py | 41 ++++++ test-data/unit/check-dataclass-transform.test | 129 +++++++++++++++++- test-data/unit/fixtures/dataclasses.pyi | 1 + test-data/unit/fixtures/typing-full.pyi | 9 ++ test-data/unit/fixtures/typing-medium.pyi | 2 - test-data/unit/lib-stub/typing_extensions.pyi | 9 +- 10 files changed, 352 insertions(+), 75 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 72350c8d9925..534ba7f82607 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -480,13 +480,7 @@ def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_all(self) -FUNCBASE_FLAGS: Final = [ - "is_property", - "is_class", - "is_static", - "is_final", - "is_dataclass_transform", -] +FUNCBASE_FLAGS: Final = ["is_property", "is_class", "is_static", "is_final"] class FuncBase(Node): @@ -512,7 +506,6 @@ class FuncBase(Node): "is_static", # Uses "@staticmethod" "is_final", # Uses "@final" "_fullname", - "is_dataclass_transform", # Is decorated with "@typing.dataclass_transform" or similar ) def __init__(self) -> None: @@ -531,7 +524,6 @@ def __init__(self) -> None: self.is_final = False # Name with module prefix self._fullname = "" - self.is_dataclass_transform = False @property @abstractmethod @@ -758,6 +750,8 @@ class FuncDef(FuncItem, SymbolNode, Statement): "deco_line", "is_trivial_body", "is_mypy_only", + # Present only when a function is decorated with @typing.datasclass_transform or similar + "dataclass_transform_spec", ) __match_args__ = ("name", "arguments", "type", "body") @@ -785,6 +779,7 @@ def __init__( self.deco_line: int | None = None # Definitions that appear in if TYPE_CHECKING are marked with this flag. self.is_mypy_only = False + self.dataclass_transform_spec: DataclassTransformSpec | None = None @property def name(self) -> str: @@ -810,6 +805,11 @@ def serialize(self) -> JsonDict: "flags": get_flags(self, FUNCDEF_FLAGS), "abstract_status": self.abstract_status, # TODO: Do we need expanded, original_def? 
+ "dataclass_transform_spec": ( + None + if self.dataclass_transform_spec is None + else self.dataclass_transform_spec.serialize() + ), } @classmethod @@ -832,6 +832,11 @@ def deserialize(cls, data: JsonDict) -> FuncDef: ret.arg_names = data["arg_names"] ret.arg_kinds = [ArgKind(x) for x in data["arg_kinds"]] ret.abstract_status = data["abstract_status"] + ret.dataclass_transform_spec = ( + DataclassTransformSpec.deserialize(data["dataclass_transform_spec"]) + if data["dataclass_transform_spec"] is not None + else None + ) # Leave these uninitialized so that future uses will trigger an error del ret.arguments del ret.max_pos @@ -3857,6 +3862,56 @@ def deserialize(cls, data: JsonDict) -> SymbolTable: return st +class DataclassTransformSpec: + """Specifies how a dataclass-like transform should be applied. The fields here are based on the + parameters accepted by `typing.dataclass_transform`.""" + + __slots__ = ( + "eq_default", + "order_default", + "kw_only_default", + "frozen_default", + "field_specifiers", + ) + + def __init__( + self, + *, + eq_default: bool | None = None, + order_default: bool | None = None, + kw_only_default: bool | None = None, + field_specifiers: tuple[str, ...] | None = None, + # Specified outside of PEP 681: + # frozen_default was added to CPythonin https://github.com/python/cpython/pull/99958 citing + # positive discussion in typing-sig + frozen_default: bool | None = None, + ): + self.eq_default = eq_default if eq_default is not None else True + self.order_default = order_default if order_default is not None else False + self.kw_only_default = kw_only_default if kw_only_default is not None else False + self.frozen_default = frozen_default if frozen_default is not None else False + self.field_specifiers = field_specifiers if field_specifiers is not None else () + + def serialize(self) -> JsonDict: + return { + "eq_default": self.eq_default, + "order_default": self.order_default, + "kw_only_default": self.kw_only_default, + "frozen_only_default": self.frozen_default, + "field_specifiers": self.field_specifiers, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> DataclassTransformSpec: + return DataclassTransformSpec( + eq_default=data.get("eq_default"), + order_default=data.get("order_default"), + kw_only_default=data.get("kw_only_default"), + frozen_default=data.get("frozen_default"), + field_specifiers=data.get("field_specifiers"), + ) + + def get_flags(node: Node, names: list[str]) -> list[str]: return [name for name in names if getattr(node, name)] diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 6306b3a77ae9..4683b8c1ffaf 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -18,9 +18,11 @@ AssignmentStmt, CallExpr, Context, + DataclassTransformSpec, Expression, JsonDict, NameExpr, + Node, PlaceholderNode, RefExpr, SymbolTableNode, @@ -37,6 +39,7 @@ add_method, deserialize_and_fixup_type, ) +from mypy.semanal_shared import find_dataclass_transform_spec from mypy.server.trigger import make_wildcard_trigger from mypy.state import state from mypy.typeops import map_type_from_supertype @@ -56,11 +59,16 @@ # The set of decorators that generate dataclasses. dataclass_makers: Final = {"dataclass", "dataclasses.dataclass"} -# The set of functions that generate dataclass fields. 
-field_makers: Final = {"dataclasses.field"} SELF_TVAR_NAME: Final = "_DT" +_TRANSFORM_SPEC_FOR_DATACLASSES = DataclassTransformSpec( + eq_default=True, + order_default=False, + kw_only_default=False, + frozen_default=False, + field_specifiers=("dataclasses.Field", "dataclasses.field"), +) class DataclassAttribute: @@ -155,6 +163,7 @@ class DataclassTransformer: def __init__(self, ctx: ClassDefContext) -> None: self._ctx = ctx + self._spec = _get_transform_spec(ctx.reason) def transform(self) -> bool: """Apply all the necessary transformations to the underlying @@ -172,9 +181,9 @@ def transform(self) -> bool: return False decorator_arguments = { "init": _get_decorator_bool_argument(self._ctx, "init", True), - "eq": _get_decorator_bool_argument(self._ctx, "eq", True), - "order": _get_decorator_bool_argument(self._ctx, "order", False), - "frozen": _get_decorator_bool_argument(self._ctx, "frozen", False), + "eq": _get_decorator_bool_argument(self._ctx, "eq", self._spec.eq_default), + "order": _get_decorator_bool_argument(self._ctx, "order", self._spec.order_default), + "frozen": _get_decorator_bool_argument(self._ctx, "frozen", self._spec.frozen_default), "slots": _get_decorator_bool_argument(self._ctx, "slots", False), "match_args": _get_decorator_bool_argument(self._ctx, "match_args", True), } @@ -411,7 +420,7 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # Second, collect attributes belonging to the current class. current_attr_names: set[str] = set() - kw_only = _get_decorator_bool_argument(ctx, "kw_only", False) + kw_only = _get_decorator_bool_argument(ctx, "kw_only", self._spec.kw_only_default) for stmt in cls.defs.body: # Any assignment that doesn't use the new type declaration # syntax can be ignored out of hand. @@ -461,7 +470,7 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: if self._is_kw_only_type(node_type): kw_only = True - has_field_call, field_args = _collect_field_args(stmt.rvalue, ctx) + has_field_call, field_args = self._collect_field_args(stmt.rvalue, ctx) is_in_init_param = field_args.get("init") if is_in_init_param is None: @@ -614,6 +623,36 @@ def _add_dataclass_fields_magic_attribute(self) -> None: kind=MDEF, node=var, plugin_generated=True ) + def _collect_field_args( + self, expr: Expression, ctx: ClassDefContext + ) -> tuple[bool, dict[str, Expression]]: + """Returns a tuple where the first value represents whether or not + the expression is a call to dataclass.field and the second is a + dictionary of the keyword arguments that field() was called with. + """ + if ( + isinstance(expr, CallExpr) + and isinstance(expr.callee, RefExpr) + and expr.callee.fullname in self._spec.field_specifiers + ): + # field() only takes keyword arguments. + args = {} + for name, arg, kind in zip(expr.arg_names, expr.args, expr.arg_kinds): + if not kind.is_named(): + if kind.is_named(star=True): + # This means that `field` is used with `**` unpacking, + # the best we can do for now is not to fail. + # TODO: we can infer what's inside `**` and try to collect it. + message = 'Unpacking **kwargs in "field()" is not supported' + else: + message = '"field()" does not accept positional arguments' + ctx.api.fail(message, expr) + return True, {} + assert name is not None + args[name] = arg + return True, args + return False, {} + def dataclass_tag_callback(ctx: ClassDefContext) -> None: """Record that we have a dataclass in the main semantic analysis pass. 
@@ -631,32 +670,29 @@ def dataclass_class_maker_callback(ctx: ClassDefContext) -> bool: return transformer.transform() -def _collect_field_args( - expr: Expression, ctx: ClassDefContext -) -> tuple[bool, dict[str, Expression]]: - """Returns a tuple where the first value represents whether or not - the expression is a call to dataclass.field and the second is a - dictionary of the keyword arguments that field() was called with. +def _get_transform_spec(reason: Expression) -> DataclassTransformSpec: + """Find the relevant transform parameters from the decorator/parent class/metaclass that + triggered the dataclasses plugin. + + Although the resulting DataclassTransformSpec is based on the typing.dataclass_transform + function, we also use it for traditional dataclasses.dataclass classes as well for simplicity. + In those cases, we return a default spec rather than one based on a call to + `typing.dataclass_transform`. """ - if ( - isinstance(expr, CallExpr) - and isinstance(expr.callee, RefExpr) - and expr.callee.fullname in field_makers - ): - # field() only takes keyword arguments. - args = {} - for name, arg, kind in zip(expr.arg_names, expr.args, expr.arg_kinds): - if not kind.is_named(): - if kind.is_named(star=True): - # This means that `field` is used with `**` unpacking, - # the best we can do for now is not to fail. - # TODO: we can infer what's inside `**` and try to collect it. - message = 'Unpacking **kwargs in "field()" is not supported' - else: - message = '"field()" does not accept positional arguments' - ctx.api.fail(message, expr) - return True, {} - assert name is not None - args[name] = arg - return True, args - return False, {} + if _is_dataclasses_decorator(reason): + return _TRANSFORM_SPEC_FOR_DATACLASSES + + spec = find_dataclass_transform_spec(reason) + assert spec is not None, ( + "trying to find dataclass transform spec, but reason is neither dataclasses.dataclass nor " + "decorated with typing.dataclass_transform" + ) + return spec + + +def _is_dataclasses_decorator(node: Node) -> bool: + if isinstance(node, CallExpr): + node = node.callee + if isinstance(node, RefExpr): + return node.fullname in dataclass_makers + return False diff --git a/mypy/semanal.py b/mypy/semanal.py index ba5a6bc67647..cd5b82f80b1d 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -99,6 +99,7 @@ ConditionalExpr, Context, ContinueStmt, + DataclassTransformSpec, Decorator, DelStmt, DictExpr, @@ -213,6 +214,7 @@ PRIORITY_FALLBACKS, SemanticAnalyzerInterface, calculate_tuple_fallback, + find_dataclass_transform_spec, has_placeholder, set_callable_name as set_callable_name, ) @@ -1523,7 +1525,7 @@ def visit_decorator(self, dec: Decorator) -> None: elif isinstance(d, CallExpr) and refers_to_fullname( d.callee, DATACLASS_TRANSFORM_NAMES ): - dec.func.is_dataclass_transform = True + dec.func.dataclass_transform_spec = self.parse_dataclass_transform_spec(d) elif not dec.var.is_property: # We have seen a "non-trivial" decorator before seeing @property, if # we will see a @property later, give an error, as we don't support this. 
@@ -1728,7 +1730,7 @@ def apply_class_plugin_hooks(self, defn: ClassDef) -> None: # Special case: if the decorator is itself decorated with # typing.dataclass_transform, apply the hook for the dataclasses plugin # TODO: remove special casing here - if hook is None and is_dataclass_transform_decorator(decorator): + if hook is None and find_dataclass_transform_spec(decorator): hook = dataclasses_plugin.dataclass_tag_callback if hook: hook(ClassDefContext(defn, decorator, self)) @@ -6456,6 +6458,35 @@ def set_future_import_flags(self, module_name: str) -> None: def is_future_flag_set(self, flag: str) -> bool: return self.modules[self.cur_mod_id].is_future_flag_set(flag) + def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSpec: + """Build a DataclassTransformSpec from the arguments passed to the given call to + typing.dataclass_transform.""" + parameters = DataclassTransformSpec() + for name, value in zip(call.arg_names, call.args): + # field_specifiers is currently the only non-boolean argument; check for it first so + # so the rest of the block can fail through to handling booleans + if name == "field_specifiers": + self.fail('"field_specifiers" support is currently unimplemented', call) + continue + + boolean = self.parse_bool(value) + if boolean is None: + self.fail(f'"{name}" argument must be a True or False literal', call) + continue + + if name == "eq_default": + parameters.eq_default = boolean + elif name == "order_default": + parameters.order_default = boolean + elif name == "kw_only_default": + parameters.kw_only_default = boolean + elif name == "frozen_default": + parameters.frozen_default = boolean + else: + self.fail(f'Unrecognized dataclass_transform parameter "{name}"', call) + + return parameters + def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike: if isinstance(sig, CallableType): @@ -6645,21 +6676,3 @@ def halt(self, reason: str = ...) -> NoReturn: return isinstance(stmt, PassStmt) or ( isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) ) - - -def is_dataclass_transform_decorator(node: Node | None) -> bool: - if isinstance(node, RefExpr): - return is_dataclass_transform_decorator(node.node) - if isinstance(node, CallExpr): - # Like dataclasses.dataclass, transform-based decorators can be applied either with or - # without parameters; ie, both of these forms are accepted: - # - # @typing.dataclass_transform - # class Foo: ... - # @typing.dataclass_transform(eq=True, order=True, ...) - # class Bar: ... - # - # We need to unwrap the call for the second variant. 
- return is_dataclass_transform_decorator(node.callee) - - return isinstance(node, Decorator) and node.func.is_dataclass_transform diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index d2dd0e32398d..796a862c35e7 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -41,7 +41,6 @@ from mypy.semanal import ( SemanticAnalyzer, apply_semantic_analyzer_patches, - is_dataclass_transform_decorator, remove_imported_names_from_symtable, ) from mypy.semanal_classprop import ( @@ -51,6 +50,7 @@ check_protocol_status, ) from mypy.semanal_infer import infer_decorator_signature_if_simple +from mypy.semanal_shared import find_dataclass_transform_spec from mypy.semanal_typeargs import TypeArgumentAnalyzer from mypy.server.aststrip import SavedAttributes from mypy.util import is_typeshed_file @@ -467,7 +467,7 @@ def apply_hooks_to_class( # Special case: if the decorator is itself decorated with # typing.dataclass_transform, apply the hook for the dataclasses plugin # TODO: remove special casing here - if hook is None and is_dataclass_transform_decorator(decorator): + if hook is None and find_dataclass_transform_spec(decorator): hook = dataclasses_plugin.dataclass_class_maker_callback if hook: diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 11c4af314a3b..05edf2ac073f 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -11,10 +11,14 @@ from mypy import join from mypy.errorcodes import ErrorCode from mypy.nodes import ( + CallExpr, Context, + DataclassTransformSpec, + Decorator, Expression, FuncDef, Node, + RefExpr, SymbolNode, SymbolTable, SymbolTableNode, @@ -341,3 +345,40 @@ def visit_placeholder_type(self, t: PlaceholderType) -> bool: def has_placeholder(typ: Type) -> bool: """Check if a type contains any placeholder types (recursively).""" return typ.accept(HasPlaceholders()) + + +def find_dataclass_transform_spec(node: Node | None) -> DataclassTransformSpec | None: + """ + Find the dataclass transform spec for the given node, if any exists. + + Per PEP 681 (https://peps.python.org/pep-0681/#the-dataclass-transform-decorator), dataclass + transforms can be specified in multiple ways, including decorator functions and + metaclasses/base classes. This function resolves the spec from any of these variants. + """ + + # The spec only lives on the function/class definition itself, so we need to unwrap down to that + # point + if isinstance(node, CallExpr): + # Like dataclasses.dataclass, transform-based decorators can be applied either with or + # without parameters; ie, both of these forms are accepted: + # + # @typing.dataclass_transform + # class Foo: ... + # @typing.dataclass_transform(eq=True, order=True, ...) + # class Bar: ... + # + # We need to unwrap the call for the second variant. 
+ node = node.callee + + if isinstance(node, RefExpr): + node = node.node + + if isinstance(node, Decorator): + # typing.dataclass_transform usage must always result in a Decorator; it always uses the + # `@dataclass_transform(...)` syntax and never `@dataclass_transform` + node = node.func + + if isinstance(node, FuncDef): + return node.dataclass_transform_spec + + return None diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 00591d46f834..01e8935b0745 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -1,5 +1,5 @@ [case testDataclassTransformReusesDataclassLogic] -# flags: --python-version 3.7 +# flags: --python-version 3.11 from typing import dataclass_transform, Type @dataclass_transform() @@ -18,7 +18,7 @@ reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builti Person('John', 32) Person('Jonh', 21, None) # E: Too many arguments for "Person" -[typing fixtures/typing-medium.pyi] +[typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] [case testDataclassTransformIsFoundInTypingExtensions] @@ -46,7 +46,7 @@ Person('Jonh', 21, None) # E: Too many arguments for "Person" [builtins fixtures/dataclasses.pyi] [case testDataclassTransformParametersAreApplied] -# flags: --python-version 3.7 +# flags: --python-version 3.11 from typing import dataclass_transform, Callable, Type @dataclass_transform() @@ -64,11 +64,11 @@ reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builti Person('John', 32) Person('John', 21, None) # E: Too many arguments for "Person" -[typing fixtures/typing-medium.pyi] +[typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] [case testDataclassTransformParametersMustBeBoolLiterals] -# flags: --python-version 3.7 +# flags: --python-version 3.11 from typing import dataclass_transform, Callable, Type @dataclass_transform() @@ -83,5 +83,122 @@ class A: ... @my_dataclass(order=not False) # E: "order" argument must be True or False. class B: ... 
-[typing fixtures/typing-medium.pyi] +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformDefaultParamsMustBeLiterals] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, Final + +BOOLEAN_CONSTANT = True +FINAL_BOOLEAN: Final = True + +@dataclass_transform(eq_default=BOOLEAN_CONSTANT) # E: "eq_default" argument must be a True or False literal +def foo(cls: Type) -> Type: + return cls +@dataclass_transform(eq_default=(not True)) # E: "eq_default" argument must be a True or False literal +def bar(cls: Type) -> Type: + return cls +@dataclass_transform(eq_default=FINAL_BOOLEAN) # E: "eq_default" argument must be a True or False literal +def baz(cls: Type) -> Type: + return cls + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformUnrecognizedParamsAreErrors] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type + +BOOLEAN_CONSTANT = True + +@dataclass_transform(nonexistant=True) # E: Unrecognized dataclass_transform parameter "nonexistant" +def foo(cls: Type) -> Type: + return cls + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + + +[case testDataclassTransformDefaultParams] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, Callable + +@dataclass_transform(eq_default=False) +def no_eq(*, order: bool = False) -> Callable[[Type], Type]: + return lambda cls: cls +@no_eq() +class Foo: ... +@no_eq(order=True) # E: "eq" must be True if "order" is True +class Bar: ... + + +@dataclass_transform(kw_only_default=True) +def always_use_kw(cls: Type) -> Type: + return cls +@always_use_kw +class Baz: + x: int +Baz(x=5) +Baz(5) # E: Too many positional arguments for "Baz" + +@dataclass_transform(order_default=True) +def ordered(*, eq: bool = True) -> Callable[[Type], Type]: + return lambda cls: cls +@ordered() +class A: + x: int +A(1) > A(2) + +@dataclass_transform(frozen_default=True) +def frozen(cls: Type) -> Type: + return cls +@frozen +class B: + x: int +b = B(x=1) +b.x = 2 # E: Property "x" defined in "B" is read-only + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformDefaultsCanBeOverridden] +# flags: --python-version 3.11 +from typing import dataclass_transform, Callable, Type + +@dataclass_transform(kw_only_default=True) +def my_dataclass(*, kw_only: bool = True) -> Callable[[Type], Type]: + return lambda cls: cls + +@my_dataclass() +class KwOnly: + x: int +@my_dataclass(kw_only=False) +class KwOptional: + x: int + +KwOnly(5) # E: Too many positional arguments for "KwOnly" +KwOptional(5) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformFieldSpecifiersDefaultsToEmpty] +# flags: --python-version 3.11 +from dataclasses import field, dataclass +from typing import dataclass_transform, Type + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Foo: + foo: int = field(kw_only=True) + +# Does not cause a type error because `dataclasses.field` is not a recognized field specifier by +# default +Foo(5) + +[typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/fixtures/dataclasses.pyi b/test-data/unit/fixtures/dataclasses.pyi index 7de40af9cfe7..ab692302a8b6 100644 --- a/test-data/unit/fixtures/dataclasses.pyi +++ b/test-data/unit/fixtures/dataclasses.pyi @@ -18,6 +18,7 @@ class ellipsis: pass class 
tuple(Generic[_T]): pass class int: pass class float: pass +class bytes: pass class str: pass class bool(int): pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index 04568f7c03f3..1471473249dc 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -181,3 +181,12 @@ class _TypedDict(Mapping[str, object]): def __delitem__(self, k: NoReturn) -> None: ... class _SpecialForm: pass + +def dataclass_transform( + *, + eq_default: bool = ..., + order_default: bool = ..., + kw_only_default: bool = ..., + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + **kwargs: Any, +) -> Callable[[T], T]: ... diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 0d0e13468013..863b0703989d 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -71,5 +71,3 @@ class ContextManager(Generic[T]): class _SpecialForm: pass TYPE_CHECKING = 1 - -def dataclass_transform() -> Callable[[T], T]: ... diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi index b03fc7e6df14..22b895971521 100644 --- a/test-data/unit/lib-stub/typing_extensions.pyi +++ b/test-data/unit/lib-stub/typing_extensions.pyi @@ -58,6 +58,13 @@ def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) - def reveal_type(__obj: T) -> T: pass -def dataclass_transform() -> Callable[[T], T]: ... +def dataclass_transform( + *, + eq_default: bool = ..., + order_default: bool = ..., + kw_only_default: bool = ..., + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + **kwargs: Any, +) -> Callable[[T], T]: ... _FutureFeatureFixture = 0 From 6b56dc01ba919a951936e3e47602e5d3f5d5fee4 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Wed, 8 Feb 2023 10:39:50 -0800 Subject: [PATCH 264/292] [used before def] correctly handle walrus operator (#14646) Fixes #14626. I believe changing the way that we analyze call expression makes sense (first, we analyze the callee, then we analyze the arguments). 
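As a minimal sketch of the pattern this fixes (it mirrors the new test case added below; `C` and its method are just placeholders):

    class C:
        def f(self, c: "C") -> None: ...

    # The walrus in the callee binds `y` before the argument list is evaluated, so this
    # call is fine at runtime. With the old traversal order (arguments before callee),
    # the argument `y` was visited before the walrus binding in the callee, which could
    # trigger a spurious used-before-def error.
    (y := C()).f(y)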
--- mypy/traverser.py | 2 +- test-data/unit/check-python38.test | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mypy/traverser.py b/mypy/traverser.py index 378d44c67f47..038d948522f0 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -253,9 +253,9 @@ def visit_yield_expr(self, o: YieldExpr) -> None: o.expr.accept(self) def visit_call_expr(self, o: CallExpr) -> None: + o.callee.accept(self) for a in o.args: a.accept(self) - o.callee.accept(self) if o.analyzed: o.analyzed.accept(self) diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index b9c798b9530e..b9f9f2173ae1 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -573,6 +573,14 @@ def foo() -> None: [x := x + y for y in [1, 2, 3]] [builtins fixtures/dict.pyi] +[case testWalrusUsedBeforeDef] +# flags: --python-version 3.8 +class C: + def f(self, c: 'C') -> None: pass + +(x := C()).f(y) # E: Cannot determine type of "y" # E: Name "y" is used before definition +(y := C()).f(y) + [case testOverloadWithPositionalOnlySelf] # flags: --python-version 3.8 from typing import overload, Optional From 786c7b07040515ccdc440005b8ff054fc6c0ebce Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 8 Feb 2023 11:51:53 -0800 Subject: [PATCH 265/292] Fix crash on deferred value constrained TypeVar (#14642) Fixes #14631 --- mypy/types.py | 8 ++++++-- test-data/unit/check-typevar-values.test | 9 +++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index 6c036ccacecd..9858559ad5c1 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -590,12 +590,16 @@ def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_var(self) def __hash__(self) -> int: - return hash((self.id, self.upper_bound)) + return hash((self.id, self.upper_bound, tuple(self.values))) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeVarType): return NotImplemented - return self.id == other.id and self.upper_bound == other.upper_bound + return ( + self.id == other.id + and self.upper_bound == other.upper_bound + and self.values == other.values + ) def serialize(self) -> JsonDict: assert not self.id.is_meta_var() diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index d5a94f96fae7..a4a4d68bd9fe 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -702,3 +702,12 @@ class Indexable: [builtins fixtures/tuple.pyi] [builtins fixtures/classmethod.pyi] + +[case testTypeVarWithValueDeferral] +from typing import TypeVar, Callable + +T = TypeVar("T", "A", "B") +Func = Callable[[], T] + +class A: ... +class B: ... 
From c23e831ab0e7ec827c38cc830d3ebd3f4c43cd75 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 9 Feb 2023 07:06:10 -0800 Subject: [PATCH 266/292] Allow overlapping comparisons between bytes-like types (#14658) --- mypy/checkexpr.py | 9 +++++++++ test-data/unit/check-flags.test | 26 ++++++++++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 4cfbd0811025..754ba6f093f5 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -192,6 +192,11 @@ "_collections_abc.dict_keys", "_collections_abc.dict_items", ] +OVERLAPPING_BYTES_ALLOWLIST: Final = { + "builtins.bytes", + "builtins.bytearray", + "builtins.memoryview", +} class TooManyUnions(Exception): @@ -3164,6 +3169,10 @@ def dangerous_comparison( return self.dangerous_comparison(left.args[0], right.args[0]) elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name: return self.dangerous_comparison(left.args[0], right.args[0]) + elif left_name in OVERLAPPING_BYTES_ALLOWLIST and right_name in ( + OVERLAPPING_BYTES_ALLOWLIST + ): + return False if isinstance(left, LiteralType) and isinstance(right, LiteralType): if isinstance(left.value, bool) and isinstance(right.value, bool): # Comparing different booleans is not dangerous. diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index ebb3744e9f08..0ac39ebf9c10 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -2148,3 +2148,29 @@ def f(x: bytes) -> None: ... f(bytearray(b"asdf")) f(memoryview(b"asdf")) # E: Argument 1 to "f" has incompatible type "memoryview"; expected "bytes" [builtins fixtures/primitives.pyi] + +[case testDisableBytearrayMemoryviewPromotionStrictEquality] +# flags: --disable-bytearray-promotion --disable-memoryview-promotion --strict-equality +def f(x: bytes, y: bytearray, z: memoryview) -> None: + x == y + y == z + x == z + 97 in x + 97 in y + 97 in z + x in y + x in z +[builtins fixtures/primitives.pyi] + +[case testEnableBytearrayMemoryviewPromotionStrictEquality] +# flags: --strict-equality +def f(x: bytes, y: bytearray, z: memoryview) -> None: + x == y + y == z + x == z + 97 in x + 97 in y + 97 in z + x in y + x in z +[builtins fixtures/primitives.pyi] From 4261e51276cb2b45d1376a3b4ef45ac9030d19c7 Mon Sep 17 00:00:00 2001 From: Stas Ilinskiy Date: Fri, 10 Feb 2023 00:30:00 -0800 Subject: [PATCH 267/292] [used before def] handle walrus declaration in match subject correctly (#14665) The subject needs to be processed outside of match statement and not as part of the first branch. Fixes #14659. 
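As a concrete sketch (the same shape as the new test case below; requires Python 3.10 for match), the walrus in the subject should be visible in every case branch and after the whole statement, not only in the first branch:

    def f(x: int) -> None:
        match y := x:   # `y` is bound by the subject expression, before any branch runs
            case 1:
                b = y
            case 2:
                c = y
        d = y           # also defined here, since the subject is evaluated unconditionally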
--- mypy/partially_defined.py | 2 +- test-data/unit/check-python310.test | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index af09493c9cae..9b8238eff83f 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -396,8 +396,8 @@ def visit_if_stmt(self, o: IfStmt) -> None: self.tracker.end_branch_statement() def visit_match_stmt(self, o: MatchStmt) -> None: - self.tracker.start_branch_statement() o.subject.accept(self) + self.tracker.start_branch_statement() for i in range(len(o.patterns)): pattern = o.patterns[i] pattern.accept(self) diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 12fd2b43c80a..7a934348aaf2 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1813,6 +1813,19 @@ def f1(x: int) -> int: [typing fixtures/typing-medium.pyi] +[case testUsedBeforeDefMatchWalrus] +# flags: --enable-error-code used-before-def +import typing + +def f0(x: int) -> None: + a = y # E: Cannot determine type of "y" # E: Name "y" is used before definition + match y := x: + case 1: + b = y + case 2: + c = y + d = y + [case testTypeAliasWithNewUnionSyntaxAndNoneLeftOperand] from typing import overload class C: From 99b04cac8b216bb426b1fab5a3ff447d1451c967 Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Fri, 10 Feb 2023 10:27:50 +0000 Subject: [PATCH 268/292] [dataclass_transform] support subclass/metaclass-based transforms (#14657) Support dataclass_transforms that use inheritance or metaclasses rather than decorators. This only needs plumbing changes so that we can get the correct metadata for a given class and trigger the dataclasses transform plugin; logic should otherwise remain the same. The code changes here are a little invasive because of how the dataclasses plugin handles it's "reason" (ie, the AST node that triggered the plugin). Currently it takes a `ClassDefContext` where `reason: Expression`, but in the case of inheritance/metaclass-based transforms, it makes more sense for the class definition itself to be the reason (since the parent class and keyword args are supplied in the class definition itself). To accommodate for this, I refactored the `DataclassTransformer` class to take a `reason: Expression | Statement` while leaving the plugin API itself alone. This mostly involved updating the identifiers used throughout the class. --- mypy/nodes.py | 14 ++ mypy/plugins/dataclasses.py | 168 +++++++++++------- mypy/semanal.py | 10 ++ mypy/semanal_main.py | 10 ++ mypy/semanal_shared.py | 26 +++ test-data/unit/check-dataclass-transform.test | 77 ++++++++ test-data/unit/fixtures/dataclasses.pyi | 1 + 7 files changed, 241 insertions(+), 65 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 534ba7f82607..2f2aa6a3efbe 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2830,6 +2830,7 @@ class is generic then it will be a type constructor of higher kind. "type_var_tuple_prefix", "type_var_tuple_suffix", "self_type", + "dataclass_transform_spec", ) _fullname: str # Fully qualified name @@ -2977,6 +2978,9 @@ class is generic then it will be a type constructor of higher kind. # Shared type variable for typing.Self in this class (if used, otherwise None). 
self_type: mypy.types.TypeVarType | None + # Added if the corresponding class is directly decorated with `typing.dataclass_transform` + dataclass_transform_spec: DataclassTransformSpec | None + FLAGS: Final = [ "is_abstract", "is_enum", @@ -3032,6 +3036,7 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None self.is_intersection = False self.metadata = {} self.self_type = None + self.dataclass_transform_spec = None def add_type_vars(self) -> None: self.has_type_var_tuple_type = False @@ -3251,6 +3256,11 @@ def serialize(self) -> JsonDict: "slots": list(sorted(self.slots)) if self.slots is not None else None, "deletable_attributes": self.deletable_attributes, "self_type": self.self_type.serialize() if self.self_type is not None else None, + "dataclass_transform_spec": ( + self.dataclass_transform_spec.serialize() + if self.dataclass_transform_spec is not None + else None + ), } return data @@ -3314,6 +3324,10 @@ def deserialize(cls, data: JsonDict) -> TypeInfo: set_flags(ti, data["flags"]) st = data["self_type"] ti.self_type = mypy.types.TypeVarType.deserialize(st) if st is not None else None + if data.get("dataclass_transform_spec") is not None: + ti.dataclass_transform_spec = DataclassTransformSpec.deserialize( + data["dataclass_transform_spec"] + ) return ti diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 4683b8c1ffaf..3feb644dc8ea 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -17,6 +17,7 @@ Argument, AssignmentStmt, CallExpr, + ClassDef, Context, DataclassTransformSpec, Expression, @@ -25,6 +26,7 @@ Node, PlaceholderNode, RefExpr, + Statement, SymbolTableNode, TempNode, TypeAlias, @@ -36,7 +38,7 @@ from mypy.plugins.common import ( _get_decorator_bool_argument, add_attribute_to_class, - add_method, + add_method_to_class, deserialize_and_fixup_type, ) from mypy.semanal_shared import find_dataclass_transform_spec @@ -161,17 +163,26 @@ class DataclassTransformer: there are no placeholders. """ - def __init__(self, ctx: ClassDefContext) -> None: - self._ctx = ctx - self._spec = _get_transform_spec(ctx.reason) + def __init__( + self, + cls: ClassDef, + # Statement must also be accepted since class definition itself may be passed as the reason + # for subclass/metaclass-based uses of `typing.dataclass_transform` + reason: Expression | Statement, + spec: DataclassTransformSpec, + api: SemanticAnalyzerPluginInterface, + ) -> None: + self._cls = cls + self._reason = reason + self._spec = spec + self._api = api def transform(self) -> bool: """Apply all the necessary transformations to the underlying dataclass so as to ensure it is fully type checked according to the rules in PEP 557. """ - ctx = self._ctx - info = self._ctx.cls.info + info = self._cls.info attributes = self.collect_attributes() if attributes is None: # Some definitions are not ready. We need another pass. 
@@ -180,14 +191,14 @@ def transform(self) -> bool: if attr.type is None: return False decorator_arguments = { - "init": _get_decorator_bool_argument(self._ctx, "init", True), - "eq": _get_decorator_bool_argument(self._ctx, "eq", self._spec.eq_default), - "order": _get_decorator_bool_argument(self._ctx, "order", self._spec.order_default), - "frozen": _get_decorator_bool_argument(self._ctx, "frozen", self._spec.frozen_default), - "slots": _get_decorator_bool_argument(self._ctx, "slots", False), - "match_args": _get_decorator_bool_argument(self._ctx, "match_args", True), + "init": self._get_bool_arg("init", True), + "eq": self._get_bool_arg("eq", self._spec.eq_default), + "order": self._get_bool_arg("order", self._spec.order_default), + "frozen": self._get_bool_arg("frozen", self._spec.frozen_default), + "slots": self._get_bool_arg("slots", False), + "match_args": self._get_bool_arg("match_args", True), } - py_version = self._ctx.api.options.python_version + py_version = self._api.options.python_version # If there are no attributes, it may be that the semantic analyzer has not # processed them yet. In order to work around this, we can simply skip generating @@ -199,7 +210,7 @@ def transform(self) -> bool: and attributes ): - with state.strict_optional_set(ctx.api.options.strict_optional): + with state.strict_optional_set(self._api.options.strict_optional): args = [ attr.to_argument(info) for attr in attributes @@ -221,7 +232,9 @@ def transform(self) -> bool: Argument(nameless_var, AnyType(TypeOfAny.explicit), None, ARG_STAR2), ] - add_method(ctx, "__init__", args=args, return_type=NoneType()) + add_method_to_class( + self._api, self._cls, "__init__", args=args, return_type=NoneType() + ) if ( decorator_arguments["eq"] @@ -229,7 +242,7 @@ def transform(self) -> bool: or decorator_arguments["order"] ): # Type variable for self types in generated methods. - obj_type = ctx.api.named_type("builtins.object") + obj_type = self._api.named_type("builtins.object") self_tvar_expr = TypeVarExpr( SELF_TVAR_NAME, info.fullname + "." + SELF_TVAR_NAME, [], obj_type ) @@ -238,16 +251,16 @@ def transform(self) -> bool: # Add <, >, <=, >=, but only if the class has an eq method. if decorator_arguments["order"]: if not decorator_arguments["eq"]: - ctx.api.fail('"eq" must be True if "order" is True', ctx.reason) + self._api.fail('"eq" must be True if "order" is True', self._reason) for method_name in ["__lt__", "__gt__", "__le__", "__ge__"]: # Like for __eq__ and __ne__, we want "other" to match # the self type. - obj_type = ctx.api.named_type("builtins.object") + obj_type = self._api.named_type("builtins.object") order_tvar_def = TypeVarType( SELF_TVAR_NAME, info.fullname + "." 
+ SELF_TVAR_NAME, -1, [], obj_type ) - order_return_type = ctx.api.named_type("builtins.bool") + order_return_type = self._api.named_type("builtins.bool") order_args = [ Argument(Var("other", order_tvar_def), order_tvar_def, None, ARG_POS) ] @@ -255,13 +268,14 @@ def transform(self) -> bool: existing_method = info.get(method_name) if existing_method is not None and not existing_method.plugin_generated: assert existing_method.node - ctx.api.fail( + self._api.fail( f'You may not have a custom "{method_name}" method when "order" is True', existing_method.node, ) - add_method( - ctx, + add_method_to_class( + self._api, + self._cls, method_name, args=order_args, return_type=order_return_type, @@ -277,12 +291,12 @@ def transform(self) -> bool: if decorator_arguments["frozen"]: if any(not parent["frozen"] for parent in parent_decorator_arguments): - ctx.api.fail("Cannot inherit frozen dataclass from a non-frozen one", info) + self._api.fail("Cannot inherit frozen dataclass from a non-frozen one", info) self._propertize_callables(attributes, settable=False) self._freeze(attributes) else: if any(parent["frozen"] for parent in parent_decorator_arguments): - ctx.api.fail("Cannot inherit non-frozen dataclass from a frozen one", info) + self._api.fail("Cannot inherit non-frozen dataclass from a frozen one", info) self._propertize_callables(attributes) if decorator_arguments["slots"]: @@ -298,12 +312,12 @@ def transform(self) -> bool: and attributes and py_version >= (3, 10) ): - str_type = ctx.api.named_type("builtins.str") + str_type = self._api.named_type("builtins.str") literals: list[Type] = [ LiteralType(attr.name, str_type) for attr in attributes if attr.is_in_init ] - match_args_type = TupleType(literals, ctx.api.named_type("builtins.tuple")) - add_attribute_to_class(ctx.api, ctx.cls, "__match_args__", match_args_type) + match_args_type = TupleType(literals, self._api.named_type("builtins.tuple")) + add_attribute_to_class(self._api, self._cls, "__match_args__", match_args_type) self._add_dataclass_fields_magic_attribute() @@ -320,10 +334,10 @@ def add_slots( if not correct_version: # This means that version is lower than `3.10`, # it is just a non-existent argument for `dataclass` function. - self._ctx.api.fail( + self._api.fail( 'Keyword argument "slots" for "dataclass" ' "is only valid in Python 3.10 and higher", - self._ctx.reason, + self._reason, ) return @@ -335,11 +349,11 @@ def add_slots( # Class explicitly specifies a different `__slots__` field. # And `@dataclass(slots=True)` is used. # In runtime this raises a type error. - self._ctx.api.fail( + self._api.fail( '"{}" both defines "__slots__" and is used with "slots=True"'.format( - self._ctx.cls.name + self._cls.name ), - self._ctx.cls, + self._cls, ) return @@ -375,8 +389,7 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: Return None if some dataclass base class hasn't been processed yet and thus we'll need to ask for another pass. """ - ctx = self._ctx - cls = self._ctx.cls + cls = self._cls # First, collect attributes belonging to any class in the MRO, ignoring duplicates. # @@ -397,30 +410,30 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: continue # Each class depends on the set of attributes in its dataclass ancestors. 
- ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) found_dataclass_supertype = True for data in info.metadata["dataclass"]["attributes"]: name: str = data["name"] - attr = DataclassAttribute.deserialize(info, data, ctx.api) + attr = DataclassAttribute.deserialize(info, data, self._api) # TODO: We shouldn't be performing type operations during the main # semantic analysis pass, since some TypeInfo attributes might # still be in flux. This should be performed in a later phase. - with state.strict_optional_set(ctx.api.options.strict_optional): - attr.expand_typevar_from_subtype(ctx.cls.info) + with state.strict_optional_set(self._api.options.strict_optional): + attr.expand_typevar_from_subtype(cls.info) found_attrs[name] = attr sym_node = cls.info.names.get(name) if sym_node and sym_node.node and not isinstance(sym_node.node, Var): - ctx.api.fail( + self._api.fail( "Dataclass attribute may only be overridden by another attribute", sym_node.node, ) # Second, collect attributes belonging to the current class. current_attr_names: set[str] = set() - kw_only = _get_decorator_bool_argument(ctx, "kw_only", self._spec.kw_only_default) + kw_only = self._get_bool_arg("kw_only", self._spec.kw_only_default) for stmt in cls.defs.body: # Any assignment that doesn't use the new type declaration # syntax can be ignored out of hand. @@ -442,7 +455,7 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: assert not isinstance(node, PlaceholderNode) if isinstance(node, TypeAlias): - ctx.api.fail( + self._api.fail( ("Type aliases inside dataclass definitions are not supported at runtime"), node, ) @@ -470,13 +483,13 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: if self._is_kw_only_type(node_type): kw_only = True - has_field_call, field_args = self._collect_field_args(stmt.rvalue, ctx) + has_field_call, field_args = self._collect_field_args(stmt.rvalue) is_in_init_param = field_args.get("init") if is_in_init_param is None: is_in_init = True else: - is_in_init = bool(ctx.api.parse_bool(is_in_init_param)) + is_in_init = bool(self._api.parse_bool(is_in_init_param)) has_default = False # Ensure that something like x: int = field() is rejected @@ -498,7 +511,7 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # kw_only value from the decorator parameter. field_kw_only_param = field_args.get("kw_only") if field_kw_only_param is not None: - is_kw_only = bool(ctx.api.parse_bool(field_kw_only_param)) + is_kw_only = bool(self._api.parse_bool(field_kw_only_param)) if sym.type is None and node.is_final and node.is_inferred: # This is a special case, assignment like x: Final = 42 is classified @@ -506,11 +519,11 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # We do not support inferred types in dataclasses, so we can try inferring # type for simple literals, and otherwise require an explicit type # argument for Final[...]. - typ = ctx.api.analyze_simple_literal_type(stmt.rvalue, is_final=True) + typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True) if typ: node.type = typ else: - ctx.api.fail( + self._api.fail( "Need type argument for Final[...] 
with non-literal default in dataclass", stmt, ) @@ -545,19 +558,21 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: if found_default and attr.is_in_init and not attr.has_default and not attr.kw_only: # If the issue comes from merging different classes, report it # at the class definition point. - context: Context = ctx.cls + context: Context = cls if attr.name in current_attr_names: context = Context(line=attr.line, column=attr.column) - ctx.api.fail( + self._api.fail( "Attributes without a default cannot follow attributes with one", context ) found_default = found_default or (attr.has_default and attr.is_in_init) if found_kw_sentinel and self._is_kw_only_type(attr.type): - context = ctx.cls + context = cls if attr.name in current_attr_names: context = Context(line=attr.line, column=attr.column) - ctx.api.fail("There may not be more than one field with the KW_ONLY type", context) + self._api.fail( + "There may not be more than one field with the KW_ONLY type", context + ) found_kw_sentinel = found_kw_sentinel or self._is_kw_only_type(attr.type) return all_attrs @@ -565,7 +580,7 @@ def _freeze(self, attributes: list[DataclassAttribute]) -> None: """Converts all attributes to @property methods in order to emulate frozen classes. """ - info = self._ctx.cls.info + info = self._cls.info for attr in attributes: sym_node = info.names.get(attr.name) if sym_node is not None: @@ -589,7 +604,7 @@ def _propertize_callables( `self` argument (it is not). """ - info = self._ctx.cls.info + info = self._cls.info for attr in attributes: if isinstance(get_proper_type(attr.type), CallableType): var = attr.to_var(info) @@ -611,21 +626,19 @@ def _is_kw_only_type(self, node: Type | None) -> bool: def _add_dataclass_fields_magic_attribute(self) -> None: attr_name = "__dataclass_fields__" any_type = AnyType(TypeOfAny.explicit) - field_type = self._ctx.api.named_type_or_none("dataclasses.Field", [any_type]) or any_type - attr_type = self._ctx.api.named_type( - "builtins.dict", [self._ctx.api.named_type("builtins.str"), field_type] + field_type = self._api.named_type_or_none("dataclasses.Field", [any_type]) or any_type + attr_type = self._api.named_type( + "builtins.dict", [self._api.named_type("builtins.str"), field_type] ) var = Var(name=attr_name, type=attr_type) - var.info = self._ctx.cls.info - var._fullname = self._ctx.cls.info.fullname + "." + attr_name + var.info = self._cls.info + var._fullname = self._cls.info.fullname + "." + attr_name var.is_classvar = True - self._ctx.cls.info.names[attr_name] = SymbolTableNode( + self._cls.info.names[attr_name] = SymbolTableNode( kind=MDEF, node=var, plugin_generated=True ) - def _collect_field_args( - self, expr: Expression, ctx: ClassDefContext - ) -> tuple[bool, dict[str, Expression]]: + def _collect_field_args(self, expr: Expression) -> tuple[bool, dict[str, Expression]]: """Returns a tuple where the first value represents whether or not the expression is a call to dataclass.field and the second is a dictionary of the keyword arguments that field() was called with. 
@@ -646,13 +659,37 @@ def _collect_field_args( message = 'Unpacking **kwargs in "field()" is not supported' else: message = '"field()" does not accept positional arguments' - ctx.api.fail(message, expr) + self._api.fail(message, expr) return True, {} assert name is not None args[name] = arg return True, args return False, {} + def _get_bool_arg(self, name: str, default: bool) -> bool: + # Expressions are always CallExprs (either directly or via a wrapper like Decorator), so + # we can use the helpers from common + if isinstance(self._reason, Expression): + return _get_decorator_bool_argument( + ClassDefContext(self._cls, self._reason, self._api), name, default + ) + + # Subclass/metaclass use of `typing.dataclass_transform` reads the parameters from the + # class's keyword arguments (ie `class Subclass(Parent, kwarg1=..., kwarg2=...)`) + expression = self._cls.keywords.get(name) + if expression is not None: + value = self._api.parse_bool(self._cls.keywords[name]) + if value is not None: + return value + else: + self._api.fail(f'"{name}" argument must be True or False', expression) + return default + + +def add_dataclass_tag(info: TypeInfo) -> None: + # The value is ignored, only the existence matters. + info.metadata["dataclass_tag"] = {} + def dataclass_tag_callback(ctx: ClassDefContext) -> None: """Record that we have a dataclass in the main semantic analysis pass. @@ -660,13 +697,14 @@ def dataclass_tag_callback(ctx: ClassDefContext) -> None: The later pass implemented by DataclassTransformer will use this to detect dataclasses in base classes. """ - # The value is ignored, only the existence matters. - ctx.cls.info.metadata["dataclass_tag"] = {} + add_dataclass_tag(ctx.cls.info) def dataclass_class_maker_callback(ctx: ClassDefContext) -> bool: """Hooks into the class typechecking process to add support for dataclasses.""" - transformer = DataclassTransformer(ctx) + transformer = DataclassTransformer( + ctx.cls, ctx.reason, _get_transform_spec(ctx.reason), ctx.api + ) return transformer.transform() diff --git a/mypy/semanal.py b/mypy/semanal.py index cd5b82f80b1d..8dcea36f41b9 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1749,6 +1749,12 @@ def apply_class_plugin_hooks(self, defn: ClassDef) -> None: if hook: hook(ClassDefContext(defn, base_expr, self)) + # Check if the class definition itself triggers a dataclass transform (via a parent class/ + # metaclass) + spec = find_dataclass_transform_spec(defn) + if spec is not None: + dataclasses_plugin.add_dataclass_tag(defn.info) + def get_fullname_for_hook(self, expr: Expression) -> str | None: if isinstance(expr, CallExpr): return self.get_fullname_for_hook(expr.callee) @@ -1796,6 +1802,10 @@ def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None self.fail("@runtime_checkable can only be used with protocol classes", defn) elif decorator.fullname in FINAL_DECORATOR_NAMES: defn.info.is_final = True + elif isinstance(decorator, CallExpr) and refers_to_fullname( + decorator.callee, DATACLASS_TRANSFORM_NAMES + ): + defn.info.dataclass_transform_spec = self.parse_dataclass_transform_spec(decorator) def clean_up_bases_and_infer_type_variables( self, defn: ClassDef, base_type_exprs: list[Expression], context: Context diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 796a862c35e7..a5e85878e931 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -472,6 +472,16 @@ def apply_hooks_to_class( if hook: ok = ok and hook(ClassDefContext(defn, decorator, self)) + + # Check if the class definition 
itself triggers a dataclass transform (via a parent class/ + # metaclass) + spec = find_dataclass_transform_spec(info) + if spec is not None: + with self.file_context(file_node, options, info): + # We can't use the normal hook because reason = defn, and ClassDefContext only accepts + # an Expression for reason + ok = ok and dataclasses_plugin.DataclassTransformer(defn, defn, spec, self).transform() + return ok diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 05edf2ac073f..28ec8d0857ff 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -12,6 +12,7 @@ from mypy.errorcodes import ErrorCode from mypy.nodes import ( CallExpr, + ClassDef, Context, DataclassTransformSpec, Decorator, @@ -378,7 +379,32 @@ def find_dataclass_transform_spec(node: Node | None) -> DataclassTransformSpec | # `@dataclass_transform(...)` syntax and never `@dataclass_transform` node = node.func + # For functions, we can directly consult the AST field for the spec if isinstance(node, FuncDef): return node.dataclass_transform_spec + if isinstance(node, ClassDef): + node = node.info + if isinstance(node, TypeInfo): + # Search all parent classes to see if any are decorated with `typing.dataclass_transform` + for base in node.mro[1:]: + if base.dataclass_transform_spec is not None: + return base.dataclass_transform_spec + + # Check if there is a metaclass that is decorated with `typing.dataclass_transform` + # + # Note that PEP 681 only discusses using a metaclass that is directly decorated with + # `typing.dataclass_transform`; subclasses thereof should be treated with dataclass + # semantics rather than as transforms: + # + # > If dataclass_transform is applied to a class, dataclass-like semantics will be assumed + # > for any class that directly or indirectly derives from the decorated class or uses the + # > decorated class as a metaclass. + # + # The wording doesn't make this entirely explicit, but Pyright (the reference + # implementation for this PEP) only handles directly-decorated metaclasses. + metaclass_type = node.metaclass_type + if metaclass_type is not None and metaclass_type.type.dataclass_transform_spec is not None: + return metaclass_type.type.dataclass_transform_spec + return None diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 01e8935b0745..075302762041 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -76,12 +76,19 @@ def my_dataclass(*, eq: bool = True, order: bool = False) -> Callable[[Type], Ty def transform(cls: Type) -> Type: return cls return transform +@dataclass_transform() +class BaseClass: + def __init_subclass__(cls, *, eq: bool): ... +@dataclass_transform() +class Metaclass(type): ... BOOL_CONSTANT = True @my_dataclass(eq=BOOL_CONSTANT) # E: "eq" argument must be True or False. class A: ... @my_dataclass(order=not False) # E: "order" argument must be True or False. class B: ... +class C(BaseClass, eq=BOOL_CONSTANT): ... # E: "eq" argument must be True or False +class D(metaclass=Metaclass, order=not False): ... 
# E: "order" argument must be True or False [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] @@ -202,3 +209,73 @@ Foo(5) [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformViaBaseClass] +# flags: --python-version 3.11 +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class Dataclass: + def __init_subclass__(cls, *, kw_only: bool = False): ... + +class Person(Dataclass, kw_only=True): + name: str + age: int + +reveal_type(Person) # N: Revealed type is "def (*, name: builtins.str, age: builtins.int) -> __main__.Person" +Person('Jonh', 21) # E: Too many positional arguments for "Person" +person = Person(name='John', age=32) +person.name = "John Smith" # E: Property "name" defined in "Person" is read-only + +class Contact(Person): + email: str + +reveal_type(Contact) # N: Revealed type is "def (email: builtins.str, *, name: builtins.str, age: builtins.int) -> __main__.Contact" +Contact('john@john.com', name='John', age=32) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformViaMetaclass] +# flags: --python-version 3.11 +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class Dataclass(type): ... + +class Person(metaclass=Dataclass, kw_only=True): + name: str + age: int + +reveal_type(Person) # N: Revealed type is "def (*, name: builtins.str, age: builtins.int) -> __main__.Person" +Person('Jonh', 21) # E: Too many positional arguments for "Person" +person = Person(name='John', age=32) +person.name = "John Smith" # E: Property "name" defined in "Person" is read-only + +class Contact(Person): + email: str + +reveal_type(Contact) # N: Revealed type is "def (email: builtins.str, *, name: builtins.str, age: builtins.int) -> __main__.Contact" +Contact('john@john.com', name='John', age=32) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformViaSubclassOfMetaclass] +# flags: --python-version 3.11 +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class BaseMeta(type): ... +class SubMeta(BaseMeta): ... + +# MyPy does *not* recognize this as a dataclass because the metaclass is not directly decorated with +# dataclass_transform +class Foo(metaclass=SubMeta): + foo: int + +reveal_type(Foo) # N: Revealed type is "def () -> __main__.Foo" +Foo(1) # E: Too many arguments for "Foo" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/fixtures/dataclasses.pyi b/test-data/unit/fixtures/dataclasses.pyi index ab692302a8b6..e9394c84ba7d 100644 --- a/test-data/unit/fixtures/dataclasses.pyi +++ b/test-data/unit/fixtures/dataclasses.pyi @@ -10,6 +10,7 @@ VT = TypeVar('VT') class object: def __init__(self) -> None: pass + def __init_subclass__(cls) -> None: pass def __eq__(self, o: object) -> bool: pass def __ne__(self, o: object) -> bool: pass From 4192701c115de2f54fd57218308d4ed0e262effb Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Sat, 11 Feb 2023 00:40:54 -0800 Subject: [PATCH 269/292] Remove unused `imported_names` field (#14678) This PR removes the unused `imported_names` field from the `ImportAll` node class. Nothing outside of this class references it, so it should be safe to remove. 
--- mypy/nodes.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 2f2aa6a3efbe..94fc8f08f068 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -461,20 +461,17 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ImportAll(ImportBase): """from m import *""" - __slots__ = ("id", "relative", "imported_names") + __slots__ = ("id", "relative") __match_args__ = ("id", "relative") id: str relative: int - # NOTE: Only filled and used by old semantic analyzer. - imported_names: list[str] def __init__(self, id: str, relative: int) -> None: super().__init__() self.id = id self.relative = relative - self.imported_names = [] def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_all(self) From ad82257170636871a0062ebe6fb82fb45523f580 Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Mon, 13 Feb 2023 12:54:25 +0000 Subject: [PATCH 270/292] [dataclass_transform] support function overloads (#14651) Extends the existing decorator support to include overloads. This doesn't require much extra work: we just update `find_dataclass_transform_spec` to search for the first overload decorated with `dataclass_transform()`. --- mypy/semanal_shared.py | 12 +++++ test-data/unit/check-dataclass-transform.test | 54 +++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 28ec8d0857ff..dd069fbaec98 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -19,6 +19,7 @@ Expression, FuncDef, Node, + OverloadedFuncDef, RefExpr, SymbolNode, SymbolTable, @@ -379,6 +380,17 @@ def find_dataclass_transform_spec(node: Node | None) -> DataclassTransformSpec | # `@dataclass_transform(...)` syntax and never `@dataclass_transform` node = node.func + if isinstance(node, OverloadedFuncDef): + # The dataclass_transform decorator may be attached to any single overload, so we must + # search them all. + # Note that using more than one decorator is undefined behavior, so we can just take the + # first that we find. + for candidate in node.items: + spec = find_dataclass_transform_spec(candidate) + if spec is not None: + return spec + return find_dataclass_transform_spec(node.impl) + # For functions, we can directly consult the AST field for the spec if isinstance(node, FuncDef): return node.dataclass_transform_spec diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 075302762041..40f3a4cde5fb 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -210,6 +210,60 @@ Foo(5) [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] +[case testDataclassTransformOverloadsDecoratorOnOverload] +# flags: --python-version 3.11 +from typing import dataclass_transform, overload, Any, Callable, Type, Literal + +@overload +def my_dataclass(*, foo: str) -> Callable[[Type], Type]: ... +@overload +@dataclass_transform(frozen_default=True) +def my_dataclass(*, foo: int) -> Callable[[Type], Type]: ... 
+def my_dataclass(*, foo: Any) -> Callable[[Type], Type]: + return lambda cls: cls +@my_dataclass(foo="hello") +class A: + a: int +@my_dataclass(foo=5) +class B: + b: int + +reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> __main__.A" +reveal_type(B) # N: Revealed type is "def (b: builtins.int) -> __main__.B" +A(1, "hello") # E: Too many arguments for "A" +a = A(1) +a.a = 2 # E: Property "a" defined in "A" is read-only + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformOverloadsDecoratorOnImpl] +# flags: --python-version 3.11 +from typing import dataclass_transform, overload, Any, Callable, Type, Literal + +@overload +def my_dataclass(*, foo: str) -> Callable[[Type], Type]: ... +@overload +def my_dataclass(*, foo: int) -> Callable[[Type], Type]: ... +@dataclass_transform(frozen_default=True) +def my_dataclass(*, foo: Any) -> Callable[[Type], Type]: + return lambda cls: cls +@my_dataclass(foo="hello") +class A: + a: int +@my_dataclass(foo=5) +class B: + b: int + +reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> __main__.A" +reveal_type(B) # N: Revealed type is "def (b: builtins.int) -> __main__.B" +A(1, "hello") # E: Too many arguments for "A" +a = A(1) +a.a = 2 # E: Property "a" defined in "A" is read-only + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + [case testDataclassTransformViaBaseClass] # flags: --python-version 3.11 from typing import dataclass_transform From 563e29dedc2a74af868076cce5c61d535cac3d6e Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 13 Feb 2023 18:11:13 +0000 Subject: [PATCH 271/292] [mypyc] Fix test case testI64Cast on 32-bit architectures (#14691) Add 64-bit and 32-bit variants of the test. Fixes #14633. --- mypyc/test-data/irbuild-i64.test | 35 +++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index 253d1a837c7b..f616893d8fe5 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1770,7 +1770,7 @@ L1: L2: return 1 -[case testI64Cast] +[case testI64Cast_64bit] from typing import cast from mypy_extensions import i64 @@ -1811,6 +1811,39 @@ L2: L3: return r3 +[case testI64Cast_32bit] +from typing import cast +from mypy_extensions import i64 + +def cast_int(x: int) -> i64: + return cast(i64, x) +[out] +def cast_int(x): + x :: int + r0 :: native_int + r1 :: bit + r2, r3, r4 :: int64 + r5 :: ptr + r6 :: c_ptr + r7 :: int64 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = extend signed x: builtins.int to int64 + r3 = r2 >> 1 + r4 = r3 + goto L3 +L2: + r5 = x ^ 1 + r6 = r5 + r7 = CPyLong_AsInt64(r6) + r4 = r7 + keep_alive x +L3: + return r4 + [case testI64ExplicitConversionFromVariousTypes] from mypy_extensions import i64 From 0b4ccaeb7dc282199e727978bc40d0fdde1897c9 Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Tue, 14 Feb 2023 13:18:25 +0000 Subject: [PATCH 272/292] consolidate literal bool argument error messages (#14693) Follow up on some of the recurring feedback from #14580 and #14657. There are many error messages similar to `X must be True or False.` in MyPy. This commit updates them all to: - remove the dangling period for consistency with other error messages - clarify that we need a `True` or `False` literal - use the `literal-required` error code for consistency with other literal errors This should have no impact outside of error message formatting. 
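For example, the attrs checks now produce the reworded diagnostics (carrying the `literal-required` error code), matching the updated test expectations later in this patch:

    import attr

    x = True

    @attr.s(cmp=x)            # E: "cmp" argument must be a True, False, or None literal
    class A:
        a = attr.ib(init=x)   # E: "init" argument must be a True or False literal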
--- mypy/plugins/attrs.py | 7 ++- mypy/plugins/common.py | 12 ++--- mypy/plugins/dataclasses.py | 8 +--- mypy/semanal.py | 9 +++- mypy/semanal_shared.py | 45 +++++++++++++++++-- mypy/semanal_typeddict.py | 17 ++++--- test-data/unit/check-attr.test | 6 +-- test-data/unit/check-dataclass-transform.test | 8 ++-- test-data/unit/check-typeddict.test | 12 ++--- 9 files changed, 84 insertions(+), 40 deletions(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 50d2955d2584..6fda965ade8b 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -6,6 +6,7 @@ from typing_extensions import Final, Literal import mypy.plugin # To avoid circular imports. +from mypy.errorcodes import LITERAL_REQ from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type from mypy.nodes import ( ARG_NAMED, @@ -246,7 +247,11 @@ def _get_decorator_optional_bool_argument( return False if attr_value.fullname == "builtins.None": return None - ctx.api.fail(f'"{name}" argument must be True or False.', ctx.reason) + ctx.api.fail( + f'"{name}" argument must be a True, False, or None literal', + ctx.reason, + code=LITERAL_REQ, + ) return default return default else: diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 38109892e09d..0acf3e3a6369 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -20,7 +20,11 @@ Var, ) from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface -from mypy.semanal_shared import ALLOW_INCOMPATIBLE_OVERRIDE, set_callable_name +from mypy.semanal_shared import ( + ALLOW_INCOMPATIBLE_OVERRIDE, + require_bool_literal_argument, + set_callable_name, +) from mypy.typeops import ( # noqa: F401 # Part of public API try_getting_str_literals as try_getting_str_literals, ) @@ -54,11 +58,7 @@ def _get_bool_argument(ctx: ClassDefContext, expr: CallExpr, name: str, default: """ attr_value = _get_argument(expr, name) if attr_value: - ret = ctx.api.parse_bool(attr_value) - if ret is None: - ctx.api.fail(f'"{name}" argument must be True or False.', expr) - return default - return ret + return require_bool_literal_argument(ctx.api, attr_value, name, default) return default diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 3feb644dc8ea..872765847073 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -41,7 +41,7 @@ add_method_to_class, deserialize_and_fixup_type, ) -from mypy.semanal_shared import find_dataclass_transform_spec +from mypy.semanal_shared import find_dataclass_transform_spec, require_bool_literal_argument from mypy.server.trigger import make_wildcard_trigger from mypy.state import state from mypy.typeops import map_type_from_supertype @@ -678,11 +678,7 @@ def _get_bool_arg(self, name: str, default: bool) -> bool: # class's keyword arguments (ie `class Subclass(Parent, kwarg1=..., kwarg2=...)`) expression = self._cls.keywords.get(name) if expression is not None: - value = self._api.parse_bool(self._cls.keywords[name]) - if value is not None: - return value - else: - self._api.fail(f'"{name}" argument must be True or False', expression) + return require_bool_literal_argument(self._api, expression, name, default) return default diff --git a/mypy/semanal.py b/mypy/semanal.py index 8dcea36f41b9..8c16b0addd45 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -216,6 +216,7 @@ calculate_tuple_fallback, find_dataclass_transform_spec, has_placeholder, + require_bool_literal_argument, set_callable_name as set_callable_name, ) from mypy.semanal_typeddict 
import TypedDictAnalyzer @@ -6473,15 +6474,19 @@ def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSp typing.dataclass_transform.""" parameters = DataclassTransformSpec() for name, value in zip(call.arg_names, call.args): + # Skip any positional args. Note that any such args are invalid, but we can rely on + # typeshed to enforce this and don't need an additional error here. + if name is None: + continue + # field_specifiers is currently the only non-boolean argument; check for it first so # so the rest of the block can fail through to handling booleans if name == "field_specifiers": self.fail('"field_specifiers" support is currently unimplemented', call) continue - boolean = self.parse_bool(value) + boolean = require_bool_literal_argument(self, value, name) if boolean is None: - self.fail(f'"{name}" argument must be a True or False literal', call) continue if name == "eq_default": diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index dd069fbaec98..03efbe6ca1b8 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -3,13 +3,13 @@ from __future__ import annotations from abc import abstractmethod -from typing import Callable -from typing_extensions import Final, Protocol +from typing import Callable, overload +from typing_extensions import Final, Literal, Protocol from mypy_extensions import trait from mypy import join -from mypy.errorcodes import ErrorCode +from mypy.errorcodes import LITERAL_REQ, ErrorCode from mypy.nodes import ( CallExpr, ClassDef, @@ -26,6 +26,7 @@ SymbolTableNode, TypeInfo, ) +from mypy.plugin import SemanticAnalyzerPluginInterface from mypy.tvar_scope import TypeVarLikeScope from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery from mypy.types import ( @@ -420,3 +421,41 @@ def find_dataclass_transform_spec(node: Node | None) -> DataclassTransformSpec | return metaclass_type.type.dataclass_transform_spec return None + + +# Never returns `None` if a default is given +@overload +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: Literal[True] | Literal[False], +) -> bool: + ... + + +@overload +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: None = None, +) -> bool | None: + ... 
+ + +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: bool | None = None, +) -> bool | None: + """Attempt to interpret an expression as a boolean literal, and fail analysis if we can't.""" + value = api.parse_bool(expression) + if value is None: + api.fail( + f'"{name}" argument must be a True or False literal', expression, code=LITERAL_REQ + ) + return default + + return value diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index 55618318c1e8..acb93edb7d2d 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -31,7 +31,11 @@ TypeInfo, ) from mypy.options import Options -from mypy.semanal_shared import SemanticAnalyzerInterface, has_placeholder +from mypy.semanal_shared import ( + SemanticAnalyzerInterface, + has_placeholder, + require_bool_literal_argument, +) from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type from mypy.types import ( TPDICT_NAMES, @@ -320,10 +324,7 @@ def analyze_typeddict_classdef_fields( self.fail("Right hand side values are not supported in TypedDict", stmt) total: bool | None = True if "total" in defn.keywords: - total = self.api.parse_bool(defn.keywords["total"]) - if total is None: - self.fail('Value of "total" must be True or False', defn) - total = True + total = require_bool_literal_argument(self.api, defn.keywords["total"], "total", True) required_keys = { field for (field, t) in zip(fields, types) @@ -436,11 +437,9 @@ def parse_typeddict_args( ) total: bool | None = True if len(args) == 3: - total = self.api.parse_bool(call.args[2]) + total = require_bool_literal_argument(self.api, call.args[2], "total") if total is None: - return self.fail_typeddict_arg( - 'TypedDict() "total" argument must be True or False', call - ) + return "", [], [], True, [], False dictexpr = args[1] tvar_defs = self.api.get_and_bind_all_tvars([t for k, t in dictexpr.items]) res = self.parse_typeddict_fields_with_types(dictexpr.items, call) diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index f555f2ea7011..f6ef289e792e 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -151,9 +151,9 @@ class D: [case testAttrsNotBooleans] import attr x = True -@attr.s(cmp=x) # E: "cmp" argument must be True or False. +@attr.s(cmp=x) # E: "cmp" argument must be a True, False, or None literal class A: - a = attr.ib(init=x) # E: "init" argument must be True or False. + a = attr.ib(init=x) # E: "init" argument must be a True or False literal [builtins fixtures/bool.pyi] [case testAttrsInitFalse] @@ -1866,4 +1866,4 @@ reveal_type(D) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> D(1, "").a = 2 # E: Cannot assign to final attribute "a" D(1, "").b = "2" # E: Cannot assign to final attribute "b" -[builtins fixtures/property.pyi] \ No newline at end of file +[builtins fixtures/property.pyi] diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 40f3a4cde5fb..bc8fe1ecf58c 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -83,12 +83,12 @@ class BaseClass: class Metaclass(type): ... BOOL_CONSTANT = True -@my_dataclass(eq=BOOL_CONSTANT) # E: "eq" argument must be True or False. +@my_dataclass(eq=BOOL_CONSTANT) # E: "eq" argument must be a True or False literal class A: ... -@my_dataclass(order=not False) # E: "order" argument must be True or False. 
+@my_dataclass(order=not False) # E: "order" argument must be a True or False literal class B: ... -class C(BaseClass, eq=BOOL_CONSTANT): ... # E: "eq" argument must be True or False -class D(metaclass=Metaclass, order=not False): ... # E: "order" argument must be True or False +class C(BaseClass, eq=BOOL_CONSTANT): ... # E: "eq" argument must be a True or False literal +class D(metaclass=Metaclass, order=not False): ... # E: "order" argument must be a True or False literal [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 1f200d168a55..e3d6188b643b 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -1084,8 +1084,8 @@ reveal_type(d) \ [case testTypedDictWithInvalidTotalArgument] from mypy_extensions import TypedDict -A = TypedDict('A', {'x': int}, total=0) # E: TypedDict() "total" argument must be True or False -B = TypedDict('B', {'x': int}, total=bool) # E: TypedDict() "total" argument must be True or False +A = TypedDict('A', {'x': int}, total=0) # E: "total" argument must be a True or False literal +B = TypedDict('B', {'x': int}, total=bool) # E: "total" argument must be a True or False literal C = TypedDict('C', {'x': int}, x=False) # E: Unexpected keyword argument "x" for "TypedDict" D = TypedDict('D', {'x': int}, False) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] @@ -1179,12 +1179,12 @@ reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {'x'?: builtins.in [case testTypedDictClassWithInvalidTotalArgument] from mypy_extensions import TypedDict -class D(TypedDict, total=1): # E: Value of "total" must be True or False +class D(TypedDict, total=1): # E: "total" argument must be a True or False literal x: int -class E(TypedDict, total=bool): # E: Value of "total" must be True or False +class E(TypedDict, total=bool): # E: "total" argument must be a True or False literal x: int -class F(TypedDict, total=xyz): # E: Value of "total" must be True or False \ - # E: Name "xyz" is not defined +class F(TypedDict, total=xyz): # E: Name "xyz" is not defined \ + # E: "total" argument must be a True or False literal x: int [builtins fixtures/dict.pyi] From ec511c63547430765ef03aadd9a67321c5373350 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 14 Feb 2023 13:49:27 +0000 Subject: [PATCH 273/292] Fix generic TypedDict/NamedTuple fixup (#14675) Fixes #14638 TBH I don't remember why do we need to create the "incomplete" type alias (with empty type variables), and set up type variables later. But I didn't want to risk a larger refactoring and just fixed the missing calls surfaced by the issue instead. 
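A sketch of the kind of code exercised by the new incremental test case: a generic TypedDict
defined in one module and used from another. Roughly, before this fix the special alias
recreated from the serialized cache ended up without its type variables set, which broke the
second (cache-based) run.

```python
# a.py
from typing import Generic, TypeVar
from typing_extensions import TypedDict

TValue = TypeVar("TValue")

class Dict(TypedDict, Generic[TValue]):
    value: TValue

# b.py -- re-checked from the cache on the second run
from a import Dict, TValue

def f(d: Dict[TValue]) -> TValue:
    return d["value"]
```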
--- mypy/fixup.py | 4 ++++ mypy/nodes.py | 14 ++++++++++-- test-data/unit/check-incremental.test | 31 +++++++++++++++++++++++++++ 3 files changed, 47 insertions(+), 2 deletions(-) diff --git a/mypy/fixup.py b/mypy/fixup.py index 5f76cc1d1487..7b0f5f433d72 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -80,9 +80,13 @@ def visit_type_info(self, info: TypeInfo) -> None: if info.tuple_type: info.tuple_type.accept(self.type_fixer) info.update_tuple_type(info.tuple_type) + if info.special_alias: + info.special_alias.alias_tvars = list(info.defn.type_vars) if info.typeddict_type: info.typeddict_type.accept(self.type_fixer) info.update_typeddict_type(info.typeddict_type) + if info.special_alias: + info.special_alias.alias_tvars = list(info.defn.type_vars) if info.declared_metaclass: info.declared_metaclass.accept(self.type_fixer) if info.metaclass_type: diff --git a/mypy/nodes.py b/mypy/nodes.py index 94fc8f08f068..abf0379fd29a 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -3495,8 +3495,13 @@ def __init__( @classmethod def from_tuple_type(cls, info: TypeInfo) -> TypeAlias: - """Generate an alias to the tuple type described by a given TypeInfo.""" + """Generate an alias to the tuple type described by a given TypeInfo. + + NOTE: this doesn't set type alias type variables (for generic tuple types), + they must be set by the caller (when fully analyzed). + """ assert info.tuple_type + # TODO: is it possible to refactor this to set the correct type vars here? return TypeAlias( info.tuple_type.copy_modified(fallback=mypy.types.Instance(info, info.defn.type_vars)), info.fullname, @@ -3506,8 +3511,13 @@ def from_tuple_type(cls, info: TypeInfo) -> TypeAlias: @classmethod def from_typeddict_type(cls, info: TypeInfo) -> TypeAlias: - """Generate an alias to the TypedDict type described by a given TypeInfo.""" + """Generate an alias to the TypedDict type described by a given TypeInfo. + + NOTE: this doesn't set type alias type variables (for generic TypedDicts), + they must be set by the caller (when fully analyzed). + """ assert info.typeddict_type + # TODO: is it possible to refactor this to set the correct type vars here? return TypeAlias( info.typeddict_type.copy_modified( fallback=mypy.types.Instance(info, info.defn.type_vars) diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 93d136936003..ec0c5d5e4805 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6372,3 +6372,34 @@ y: int = x [builtins fixtures/tuple.pyi] [out] [out2] + +[case testGenericTypedDictWithError] +import b +[file a.py] +from typing import Generic, TypeVar +from typing_extensions import TypedDict + +TValue = TypeVar("TValue") +class Dict(TypedDict, Generic[TValue]): + value: TValue + +[file b.py] +from a import Dict, TValue + +def f(d: Dict[TValue]) -> TValue: + return d["value"] +def g(d: Dict[TValue]) -> TValue: + return d["x"] + +[file b.py.2] +from a import Dict, TValue + +def f(d: Dict[TValue]) -> TValue: + return d["value"] +def g(d: Dict[TValue]) -> TValue: + return d["y"] +[builtins fixtures/dict.pyi] +[out] +tmp/b.py:6: error: TypedDict "a.Dict[TValue]" has no key "x" +[out2] +tmp/b.py:6: error: TypedDict "a.Dict[TValue]" has no key "y" From 4635a8c80f6b797c0ce3e53b909dcb4d5b175d1f Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Wed, 15 Feb 2023 12:55:14 +0000 Subject: [PATCH 274/292] [dataclass_transform] support field_specifiers (#14667) These are analogous to `dataclasses.field`/`dataclasses.Field`. 
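Roughly (adapted from the new test cases in this commit and targeting Python 3.11 like those
tests; `field` and `my_dataclass` are just illustrative names), a framework can declare its own
field specifier and have mypy understand per-field options such as `alias`, `init` and defaults:

```python
from typing import dataclass_transform, Any, Callable, Type

def field(
    *,
    init: bool = True,
    kw_only: bool = False,
    alias: str | None = None,
    default: Any | None = None,
    factory: Callable[[], Any] | None = None,
): ...

@dataclass_transform(field_specifiers=(field,))
def my_dataclass(cls: Type) -> Type:
    return cls

@my_dataclass
class Foo:
    a: int = field(alias="a_")
    noinit: int = field(init=False, default=1)

Foo(a_=5)            # OK: the alias renames the __init__ parameter
Foo(a=5)             # error: Unexpected keyword argument "a" for "Foo"
Foo(a_=5, noinit=2)  # error: Unexpected keyword argument "noinit" for "Foo"
```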
Like most dataclass_transform features so far, this commit mostly just plumbs through the necessary metadata so that we can re-use the existing `dataclasses` plugin logic. It also adds support for the `alias=` and `factory=` kwargs for fields, which are small; we rely on typeshed to enforce that these aren't used with `dataclasses.field`. --- mypy/message_registry.py | 4 + mypy/plugin.py | 4 + mypy/plugins/dataclasses.py | 44 ++++++- mypy/semanal.py | 30 ++++- test-data/unit/check-dataclass-transform.test | 119 ++++++++++++++++++ 5 files changed, 196 insertions(+), 5 deletions(-) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 7827a2818be9..e00aca2869bd 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -270,3 +270,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: CLASS_PATTERN_UNKNOWN_KEYWORD: Final = 'Class "{}" has no attribute "{}"' MULTIPLE_ASSIGNMENTS_IN_PATTERN: Final = 'Multiple assignments to name "{}" in pattern' CANNOT_MODIFY_MATCH_ARGS: Final = 'Cannot assign to "__match_args__"' + +DATACLASS_FIELD_ALIAS_MUST_BE_LITERAL: Final = ( + '"alias" argument to dataclass field must be a string literal' +) diff --git a/mypy/plugin.py b/mypy/plugin.py index 00a2af82969f..cf124b45d04f 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -297,6 +297,10 @@ def parse_bool(self, expr: Expression) -> bool | None: """Parse True/False literals.""" raise NotImplementedError + @abstractmethod + def parse_str_literal(self, expr: Expression) -> str | None: + """Parse string literals.""" + @abstractmethod def fail( self, diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 872765847073..6b1062d6457f 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -5,6 +5,7 @@ from typing import Optional from typing_extensions import Final +from mypy import errorcodes, message_registry from mypy.expandtype import expand_type from mypy.nodes import ( ARG_NAMED, @@ -77,6 +78,7 @@ class DataclassAttribute: def __init__( self, name: str, + alias: str | None, is_in_init: bool, is_init_var: bool, has_default: bool, @@ -87,6 +89,7 @@ def __init__( kw_only: bool, ) -> None: self.name = name + self.alias = alias self.is_in_init = is_in_init self.is_init_var = is_init_var self.has_default = has_default @@ -121,12 +124,13 @@ def expand_type(self, current_info: TypeInfo) -> Optional[Type]: return self.type def to_var(self, current_info: TypeInfo) -> Var: - return Var(self.name, self.expand_type(current_info)) + return Var(self.alias or self.name, self.expand_type(current_info)) def serialize(self) -> JsonDict: assert self.type return { "name": self.name, + "alias": self.alias, "is_in_init": self.is_in_init, "is_init_var": self.is_init_var, "has_default": self.has_default, @@ -495,7 +499,12 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # Ensure that something like x: int = field() is rejected # after an attribute with a default. if has_field_call: - has_default = "default" in field_args or "default_factory" in field_args + has_default = ( + "default" in field_args + or "default_factory" in field_args + # alias for default_factory defined in PEP 681 + or "factory" in field_args + ) # All other assignments are already type checked. elif not isinstance(stmt.rvalue, TempNode): @@ -511,7 +520,11 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # kw_only value from the decorator parameter. 
field_kw_only_param = field_args.get("kw_only") if field_kw_only_param is not None: - is_kw_only = bool(self._api.parse_bool(field_kw_only_param)) + value = self._api.parse_bool(field_kw_only_param) + if value is not None: + is_kw_only = value + else: + self._api.fail('"kw_only" argument must be a boolean literal', stmt.rvalue) if sym.type is None and node.is_final and node.is_inferred: # This is a special case, assignment like x: Final = 42 is classified @@ -529,9 +542,20 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: ) node.type = AnyType(TypeOfAny.from_error) + alias = None + if "alias" in field_args: + alias = self._api.parse_str_literal(field_args["alias"]) + if alias is None: + self._api.fail( + message_registry.DATACLASS_FIELD_ALIAS_MUST_BE_LITERAL, + stmt.rvalue, + code=errorcodes.LITERAL_REQ, + ) + current_attr_names.add(lhs.name) found_attrs[lhs.name] = DataclassAttribute( name=lhs.name, + alias=alias, is_in_init=is_in_init, is_init_var=is_init_var, has_default=has_default, @@ -624,6 +648,14 @@ def _is_kw_only_type(self, node: Type | None) -> bool: return node_type.type.fullname == "dataclasses.KW_ONLY" def _add_dataclass_fields_magic_attribute(self) -> None: + # Only add if the class is a dataclasses dataclass, and omit it for dataclass_transform + # classes. + # It would be nice if this condition were reified rather than using an `is` check. + # Only add if the class is a dataclasses dataclass, and omit it for dataclass_transform + # classes. + if self._spec is not _TRANSFORM_SPEC_FOR_DATACLASSES: + return + attr_name = "__dataclass_fields__" any_type = AnyType(TypeOfAny.explicit) field_type = self._api.named_type_or_none("dataclasses.Field", [any_type]) or any_type @@ -657,6 +689,12 @@ def _collect_field_args(self, expr: Expression) -> tuple[bool, dict[str, Express # the best we can do for now is not to fail. # TODO: we can infer what's inside `**` and try to collect it. message = 'Unpacking **kwargs in "field()" is not supported' + elif self._spec is not _TRANSFORM_SPEC_FOR_DATACLASSES: + # dataclasses.field can only be used with keyword args, but this + # restriction is only enforced for the *standardized* arguments to + # dataclass_transform field specifiers. If this is not a + # dataclasses.dataclass class, we can just skip positional args safely. + continue else: message = '"field()" does not accept positional arguments' self._api.fail(message, expr) diff --git a/mypy/semanal.py b/mypy/semanal.py index 8c16b0addd45..d2fd92499679 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -236,7 +236,7 @@ remove_dups, type_constructors, ) -from mypy.typeops import function_type, get_type_vars +from mypy.typeops import function_type, get_type_vars, try_getting_str_literals_from_type from mypy.types import ( ASSERT_TYPE_NAMES, DATACLASS_TRANSFORM_NAMES, @@ -6462,6 +6462,17 @@ def parse_bool(self, expr: Expression) -> bool | None: return False return None + def parse_str_literal(self, expr: Expression) -> str | None: + """Attempt to find the string literal value of the given expression. 
Returns `None` if no + literal value can be found.""" + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, RefExpr) and isinstance(expr.node, Var) and expr.node.type is not None: + values = try_getting_str_literals_from_type(expr.node.type) + if values is not None and len(values) == 1: + return values[0] + return None + def set_future_import_flags(self, module_name: str) -> None: if module_name in FUTURE_IMPORTS: self.modules[self.cur_mod_id].future_import_flags.add(FUTURE_IMPORTS[module_name]) @@ -6482,7 +6493,9 @@ def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSp # field_specifiers is currently the only non-boolean argument; check for it first so # so the rest of the block can fail through to handling booleans if name == "field_specifiers": - self.fail('"field_specifiers" support is currently unimplemented', call) + parameters.field_specifiers = self.parse_dataclass_transform_field_specifiers( + value + ) continue boolean = require_bool_literal_argument(self, value, name) @@ -6502,6 +6515,19 @@ def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSp return parameters + def parse_dataclass_transform_field_specifiers(self, arg: Expression) -> tuple[str, ...]: + if not isinstance(arg, TupleExpr): + self.fail('"field_specifiers" argument must be a tuple literal', arg) + return tuple() + + names = [] + for specifier in arg.items: + if not isinstance(specifier, RefExpr): + self.fail('"field_specifiers" must only contain identifiers', specifier) + return tuple() + names.append(specifier.fullname) + return tuple(names) + def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike: if isinstance(sig, CallableType): diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index bc8fe1ecf58c..2a7fad1da992 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -210,6 +210,125 @@ Foo(5) [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] +[case testDataclassTransformFieldSpecifierRejectMalformed] +# flags: --python-version 3.11 +from typing import dataclass_transform, Any, Callable, Final, Type + +def some_type() -> Type: ... +def some_function() -> Callable[[], None]: ... + +def field(*args, **kwargs): ... +def fields_tuple() -> tuple[type | Callable[..., Any], ...]: return (field,) +CONSTANT: Final = (field,) + +@dataclass_transform(field_specifiers=(some_type(),)) # E: "field_specifiers" must only contain identifiers +def bad_dataclass1() -> None: ... +@dataclass_transform(field_specifiers=(some_function(),)) # E: "field_specifiers" must only contain identifiers +def bad_dataclass2() -> None: ... +@dataclass_transform(field_specifiers=CONSTANT) # E: "field_specifiers" argument must be a tuple literal +def bad_dataclass3() -> None: ... +@dataclass_transform(field_specifiers=fields_tuple()) # E: "field_specifiers" argument must be a tuple literal +def bad_dataclass4() -> None: ... + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformFieldSpecifierParams] +# flags: --python-version 3.11 +from typing import dataclass_transform, Any, Callable, Type, Final + +def field( + *, + init: bool = True, + kw_only: bool = False, + alias: str | None = None, + default: Any | None = None, + default_factory: Callable[[], Any] | None = None, + factory: Callable[[], Any] | None = None, +): ... 
+@dataclass_transform(field_specifiers=(field,)) +def my_dataclass(cls: Type) -> Type: + return cls + +B: Final = 'b_' +@my_dataclass +class Foo: + a: int = field(alias='a_') + b: int = field(alias=B) + # cannot be passed as a positional + kwonly: int = field(kw_only=True, default=0) + # Safe to omit from constructor, error to pass + noinit: int = field(init=False, default=1) + # It should be safe to call the constructor without passing any of these + unused1: int = field(default=0) + unused2: int = field(factory=lambda: 0) + unused3: int = field(default_factory=lambda: 0) + +Foo(a=5, b_=1) # E: Unexpected keyword argument "a" for "Foo" +Foo(a_=1, b_=1, noinit=1) # E: Unexpected keyword argument "noinit" for "Foo" +Foo(1, 2, 3) # E: Too many positional arguments for "Foo" +foo = Foo(1, 2, kwonly=3) +reveal_type(foo.noinit) # N: Revealed type is "builtins.int" +reveal_type(foo.unused1) # N: Revealed type is "builtins.int" +Foo(a_=5, b_=1, unused1=2, unused2=3, unused3=4) + +def some_str() -> str: ... +def some_bool() -> bool: ... +@my_dataclass +class Bad: + bad1: int = field(alias=some_str()) # E: "alias" argument to dataclass field must be a string literal + bad2: int = field(kw_only=some_bool()) # E: "kw_only" argument must be a boolean literal + +# this metadata should only exist for dataclasses.dataclass classes +Foo.__dataclass_fields__ # E: "Type[Foo]" has no attribute "__dataclass_fields__" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformFieldSpecifierExtraArgs] +# flags: --python-version 3.11 +from typing import dataclass_transform + +def field(extra1, *, kw_only=False, extra2=0): ... +@dataclass_transform(field_specifiers=(field,)) +def my_dataclass(cls): + return cls + +@my_dataclass +class Good: + a: int = field(5) + b: int = field(5, extra2=1) + c: int = field(5, kw_only=True) + +@my_dataclass +class Bad: + a: int = field(kw_only=True) # E: Missing positional argument "extra1" in call to "field" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformMultipleFieldSpecifiers] +# flags: --python-version 3.11 +from typing import dataclass_transform + +def field1(*, default: int) -> int: ... +def field2(*, default: str) -> str: ... + +@dataclass_transform(field_specifiers=(field1, field2)) +def my_dataclass(cls): return cls + +@my_dataclass +class Foo: + a: int = field1(default=0) + b: str = field2(default='hello') + +reveal_type(Foo) # N: Revealed type is "def (a: builtins.int =, b: builtins.str =) -> __main__.Foo" +Foo() +Foo(a=1, b='bye') + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + [case testDataclassTransformOverloadsDecoratorOnOverload] # flags: --python-version 3.11 from typing import dataclass_transform, overload, Any, Callable, Type, Literal From 0bbeab8b26825ee94b8fdeda16f2aa589be4282d Mon Sep 17 00:00:00 2001 From: "Michael R. 
Crusoe" <1330696+mr-c@users.noreply.github.com> Date: Wed, 15 Feb 2023 22:06:24 +0100 Subject: [PATCH 275/292] Test with 32-bit Python (#14634) Debian does build 32bit (on Linux), and there was a recent regression This PR would have caught #14633 earlier No change in total CI time [(32 minutes)](https://github.com/python/mypy/actions/runs/4174210017) versus the baseline [(33 minutes)](https://github.com/python/mypy/actions/runs/4166467338) Confirmation that the new CI test catches the previous error is at https://github.com/python/mypy/actions/runs/4174055572/jobs/7227150570#step:7:44 --- .github/workflows/test.yml | 46 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e7072f5369c2..ed0c82ef5fa1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -155,3 +155,49 @@ jobs: continue-on-error: true - name: Mark as a success run: exit 0 + + python_32bits: + runs-on: ubuntu-latest + name: Test mypyc suite with 32-bit Python + env: + TOX_SKIP_MISSING_INTERPRETERS: False + # Rich (pip) + FORCE_COLOR: 1 + # Tox + PY_COLORS: 1 + # Mypy (see https://github.com/python/mypy/issues/7771) + TERM: xterm-color + MYPY_FORCE_COLOR: 1 + MYPY_FORCE_TERMINAL_WIDTH: 200 + # Pytest + PYTEST_ADDOPTS: --color=yes + CXX: i686-linux-gnu-g++ + CC: i686-linux-gnu-gcc + steps: + - uses: actions/checkout@v3 + - name: Install 32-bit build dependencies + run: | + sudo dpkg --add-architecture i386 && \ + sudo apt-get update && sudo apt-get install -y \ + zlib1g-dev:i386 \ + g++-i686-linux-gnu \ + gcc-i686-linux-gnu \ + libffi-dev:i386 \ + libssl-dev:i386 \ + libbz2-dev:i386 \ + libncurses-dev:i386 \ + libreadline-dev:i386 \ + libsqlite3-dev:i386 \ + liblzma-dev:i386 \ + uuid-dev:i386 + - name: Compile, install, and activate 32-bit Python + uses: gabrielfalcao/pyenv-action@v13 + with: + default: 3.11.1 + command: python -c "import platform; print(f'{platform.architecture()=} {platform.machine()=}');" + - name: Install tox + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + - name: Setup tox environment + run: tox run -e py --notest + - name: Test + run: tox run -e py --skip-pkg-install -- -n 2 mypyc/test/ From 7237831d64c051b2d6e4d99970f9b6ccf7a7bfce Mon Sep 17 00:00:00 2001 From: Richard Si Date: Thu, 16 Feb 2023 05:05:06 -0500 Subject: [PATCH 276/292] [mypyc] (Re-)Support iterating over an Union of dicts (#14713) An optimization to make iterating over dict.keys(), dict.values() and dict.items() faster caused mypyc to crash while compiling a Union of dictionaries. This commit fixes the optimization helpers to properly handle unions. irbuild.Builder.get_dict_base_type() now returns list[Instance] with the union items. In the common case we don't have a union, a single-element list is returned. And get_dict_key_type() and get_dict_value_type() will now build a simplified RUnion as needed. Fixes https://github.com/mypyc/mypyc/issues/965 and probably #14694. 
--- mypyc/codegen/literals.py | 5 ++- mypyc/irbuild/builder.py | 32 ++++++++++++----- mypyc/test-data/irbuild-dict.test | 58 ++++++++++++++++++++++++++++++- 3 files changed, 83 insertions(+), 12 deletions(-) diff --git a/mypyc/codegen/literals.py b/mypyc/codegen/literals.py index 784a8ed27c4e..05884b754452 100644 --- a/mypyc/codegen/literals.py +++ b/mypyc/codegen/literals.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, FrozenSet, List, Tuple, Union, cast +from typing import Any, FrozenSet, List, Tuple, Union, cast from typing_extensions import Final # Supported Python literal types. All tuple / frozenset items must have supported @@ -151,8 +151,7 @@ def _encode_collection_values( ... """ - # FIXME: https://github.com/mypyc/mypyc/issues/965 - value_by_index = {index: value for value, index in cast(Dict[Any, int], values).items()} + value_by_index = {index: value for value, index in values.items()} result = [] count = len(values) result.append(str(count)) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index f2a70d4e8691..f37fae608083 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -879,23 +879,39 @@ def get_sequence_type_from_type(self, target_type: Type) -> RType: else: return self.type_to_rtype(target_type.args[0]) - def get_dict_base_type(self, expr: Expression) -> Instance: + def get_dict_base_type(self, expr: Expression) -> list[Instance]: """Find dict type of a dict-like expression. This is useful for dict subclasses like SymbolTable. """ target_type = get_proper_type(self.types[expr]) - assert isinstance(target_type, Instance), target_type - dict_base = next(base for base in target_type.type.mro if base.fullname == "builtins.dict") - return map_instance_to_supertype(target_type, dict_base) + if isinstance(target_type, UnionType): + types = [get_proper_type(item) for item in target_type.items] + else: + types = [target_type] + + dict_types = [] + for t in types: + assert isinstance(t, Instance), t + dict_base = next(base for base in t.type.mro if base.fullname == "builtins.dict") + dict_types.append(map_instance_to_supertype(t, dict_base)) + return dict_types def get_dict_key_type(self, expr: Expression) -> RType: - dict_base_type = self.get_dict_base_type(expr) - return self.type_to_rtype(dict_base_type.args[0]) + dict_base_types = self.get_dict_base_type(expr) + if len(dict_base_types) == 1: + return self.type_to_rtype(dict_base_types[0].args[0]) + else: + rtypes = [self.type_to_rtype(t.args[0]) for t in dict_base_types] + return RUnion.make_simplified_union(rtypes) def get_dict_value_type(self, expr: Expression) -> RType: - dict_base_type = self.get_dict_base_type(expr) - return self.type_to_rtype(dict_base_type.args[1]) + dict_base_types = self.get_dict_base_type(expr) + if len(dict_base_types) == 1: + return self.type_to_rtype(dict_base_types[0].args[1]) + else: + rtypes = [self.type_to_rtype(t.args[1]) for t in dict_base_types] + return RUnion.make_simplified_union(rtypes) def get_dict_item_type(self, expr: Expression) -> RType: key_type = self.get_dict_key_type(expr) diff --git a/mypyc/test-data/irbuild-dict.test b/mypyc/test-data/irbuild-dict.test index 3e2c295637ab..99643b9451f0 100644 --- a/mypyc/test-data/irbuild-dict.test +++ b/mypyc/test-data/irbuild-dict.test @@ -218,13 +218,17 @@ L0: return r2 [case testDictIterationMethods] -from typing import Dict +from typing import Dict, Union def print_dict_methods(d1: Dict[int, int], d2: Dict[int, int]) -> None: for v in d1.values(): if v in d2: return 
for k, v in d2.items(): d2[k] += v +def union_of_dicts(d: Union[Dict[str, int], Dict[str, str]]) -> None: + new = {} + for k, v in d.items(): + new[k] = int(v) [out] def print_dict_methods(d1, d2): d1, d2 :: dict @@ -314,6 +318,58 @@ L11: r34 = CPy_NoErrOccured() L12: return 1 +def union_of_dicts(d): + d, r0, new :: dict + r1 :: short_int + r2 :: native_int + r3 :: short_int + r4 :: object + r5 :: tuple[bool, short_int, object, object] + r6 :: short_int + r7 :: bool + r8, r9 :: object + r10 :: str + r11 :: union[int, str] + k :: str + v :: union[int, str] + r12, r13 :: object + r14 :: int + r15 :: object + r16 :: int32 + r17, r18, r19 :: bit +L0: + r0 = PyDict_New() + new = r0 + r1 = 0 + r2 = PyDict_Size(d) + r3 = r2 << 1 + r4 = CPyDict_GetItemsIter(d) +L1: + r5 = CPyDict_NextItem(r4, r1) + r6 = r5[1] + r1 = r6 + r7 = r5[0] + if r7 goto L2 else goto L4 :: bool +L2: + r8 = r5[2] + r9 = r5[3] + r10 = cast(str, r8) + r11 = cast(union[int, str], r9) + k = r10 + v = r11 + r12 = load_address PyLong_Type + r13 = PyObject_CallFunctionObjArgs(r12, v, 0) + r14 = unbox(int, r13) + r15 = box(int, r14) + r16 = CPyDict_SetItem(new, k, r15) + r17 = r16 >= 0 :: signed +L3: + r18 = CPyDict_CheckSize(d, r3) + goto L1 +L4: + r19 = CPy_NoErrOccured() +L5: + return 1 [case testDictLoadAddress] def f() -> None: From d5860707544bdb3e51442bf6bd97b971f8c01d61 Mon Sep 17 00:00:00 2001 From: Max Murin Date: Thu, 16 Feb 2023 02:54:11 -0800 Subject: [PATCH 277/292] Give arguments a more reasonable location (#14562) Modifies arguments parsed from the AST to use the AST's row and column information. Modifies function definitions to not overwrite their arguments' locations. Modifies incorrect override messages to use the locations of arguments instead of the method itself. Modifies tests to expect the new locations. I'm not sure whether this handles bound arguments correctly; it passes tests but I don't know whether there's some edge case I'm missing. Fixes #8298. 
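One practical effect, sketched from the new test cases below: for a multi-line signature, an
incompatible-override error is now reported on the offending argument, so a targeted ignore can
live on that line:

```python
class A:
    def f(self, x: int, y: str) -> None: pass

class B(A):
    def f(
        self,
        x: int,
        y: bool,  # type: ignore[override]  # the error now points here
    ) -> None: pass
```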
--- mypy/checker.py | 9 +++- mypy/errors.py | 18 ++++---- mypy/fastparse.py | 9 +++- mypy/messages.py | 55 ++++++++++++++++++++---- mypy/nodes.py | 12 ------ mypy/test/data.py | 4 +- test-data/unit/check-abstract.test | 2 +- test-data/unit/check-classes.test | 55 +++++++++++++++++++++++- test-data/unit/check-columns.test | 6 +-- test-data/unit/daemon.test | 2 +- test-data/unit/fine-grained-inspect.test | 6 +-- 11 files changed, 135 insertions(+), 43 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 8e1de9a07b4c..4bf009f74092 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2067,7 +2067,13 @@ def erase_override(t: Type) -> Type: if not is_subtype( original.arg_types[i], erase_override(override.arg_types[i]) ): + arg_type_in_super = original.arg_types[i] + + if isinstance(node, FuncDef): + context: Context = node.arguments[i + len(override.bound_args)] + else: + context = node self.msg.argument_incompatible_with_supertype( i + 1, name, @@ -2075,7 +2081,8 @@ def erase_override(t: Type) -> Type: name_in_super, arg_type_in_super, supertype, - node, + context, + secondary_context=node, ) emitted_msg = True diff --git a/mypy/errors.py b/mypy/errors.py index ee1fa137dfe4..2c2c1e5ca227 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -4,7 +4,7 @@ import sys import traceback from collections import defaultdict -from typing import Callable, NoReturn, Optional, TextIO, Tuple, TypeVar +from typing import Callable, Iterable, NoReturn, Optional, TextIO, Tuple, TypeVar from typing_extensions import Final, Literal, TypeAlias as _TypeAlias from mypy import errorcodes as codes @@ -78,7 +78,7 @@ class ErrorInfo: # Actual origin of the error message as tuple (path, line number, end line number) # If end line number is unknown, use line number. - origin: tuple[str, int, int] + origin: tuple[str, Iterable[int]] # Fine-grained incremental target where this was reported target: str | None = None @@ -104,7 +104,7 @@ def __init__( blocker: bool, only_once: bool, allow_dups: bool, - origin: tuple[str, int, int] | None = None, + origin: tuple[str, Iterable[int]] | None = None, target: str | None = None, ) -> None: self.import_ctx = import_ctx @@ -122,7 +122,7 @@ def __init__( self.blocker = blocker self.only_once = only_once self.allow_dups = allow_dups - self.origin = origin or (file, line, line) + self.origin = origin or (file, [line]) self.target = target @@ -367,7 +367,7 @@ def report( file: str | None = None, only_once: bool = False, allow_dups: bool = False, - origin_span: tuple[int, int] | None = None, + origin_span: Iterable[int] | None = None, offset: int = 0, end_line: int | None = None, end_column: int | None = None, @@ -411,7 +411,7 @@ def report( message = " " * offset + message if origin_span is None: - origin_span = (line, line) + origin_span = [line] if end_line is None: end_line = line @@ -434,7 +434,7 @@ def report( blocker, only_once, allow_dups, - origin=(self.file, *origin_span), + origin=(self.file, origin_span), target=self.current_target(), ) self.add_error_info(info) @@ -467,7 +467,7 @@ def _filter_error(self, file: str, info: ErrorInfo) -> bool: return False def add_error_info(self, info: ErrorInfo) -> None: - file, line, end_line = info.origin + file, lines = info.origin # process the stack of ErrorWatchers before modifying any internal state # in case we need to filter out the error entirely # NB: we need to do this both here and in _add_error_info, otherwise we @@ -478,7 +478,7 @@ def add_error_info(self, info: ErrorInfo) -> None: if file in self.ignored_lines: # 
Check each line in this context for "type: ignore" comments. # line == end_line for most nodes, so we only loop once. - for scope_line in range(line, end_line + 1): + for scope_line in lines: if self.is_ignored_error(scope_line, info, self.ignored_lines[file]): # Annotation requests us to ignore all errors on this line. self.used_ignored_lines[file][scope_line].append( diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 209ebb89f36b..ef1fdf61af2e 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1081,7 +1081,14 @@ def make_argument( if argument_elide_name(arg.arg): pos_only = True - return Argument(Var(arg.arg), arg_type, self.visit(default), kind, pos_only) + argument = Argument(Var(arg.arg), arg_type, self.visit(default), kind, pos_only) + argument.set_line( + arg.lineno, + arg.col_offset, + getattr(arg, "end_lineno", None), + getattr(arg, "end_col_offset", None), + ) + return argument def fail_arg(self, msg: str, arg: ast3.arg) -> None: self.fail(msg, arg.lineno, arg.col_offset) diff --git a/mypy/messages.py b/mypy/messages.py index 7716e1323e9f..ba2508033790 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -12,6 +12,7 @@ from __future__ import annotations import difflib +import itertools import re from contextlib import contextmanager from textwrap import dedent @@ -208,6 +209,7 @@ def report( origin: Context | None = None, offset: int = 0, allow_dups: bool = False, + secondary_context: Context | None = None, ) -> None: """Report an error or note (unless disabled). @@ -215,7 +217,7 @@ def report( where # type: ignore comments have effect. """ - def span_from_context(ctx: Context) -> tuple[int, int]: + def span_from_context(ctx: Context) -> Iterable[int]: """This determines where a type: ignore for a given context has effect. Current logic is a bit tricky, to keep as much backwards compatibility as @@ -223,19 +225,24 @@ def span_from_context(ctx: Context) -> tuple[int, int]: simplify it) when we drop Python 3.7. 
""" if isinstance(ctx, (ClassDef, FuncDef)): - return ctx.deco_line or ctx.line, ctx.line + return range(ctx.deco_line or ctx.line, ctx.line + 1) elif not isinstance(ctx, Expression): - return ctx.line, ctx.line + return [ctx.line] else: - return ctx.line, ctx.end_line or ctx.line + return range(ctx.line, (ctx.end_line or ctx.line) + 1) - origin_span: tuple[int, int] | None + origin_span: Iterable[int] | None if origin is not None: origin_span = span_from_context(origin) elif context is not None: origin_span = span_from_context(context) else: origin_span = None + + if secondary_context is not None: + assert origin_span is not None + origin_span = itertools.chain(origin_span, span_from_context(secondary_context)) + self.errors.report( context.line if context else -1, context.column if context else -1, @@ -258,9 +265,18 @@ def fail( code: ErrorCode | None = None, file: str | None = None, allow_dups: bool = False, + secondary_context: Context | None = None, ) -> None: """Report an error message (unless disabled).""" - self.report(msg, context, "error", code=code, file=file, allow_dups=allow_dups) + self.report( + msg, + context, + "error", + code=code, + file=file, + allow_dups=allow_dups, + secondary_context=secondary_context, + ) def note( self, @@ -272,6 +288,7 @@ def note( allow_dups: bool = False, *, code: ErrorCode | None = None, + secondary_context: Context | None = None, ) -> None: """Report a note (unless disabled).""" self.report( @@ -283,6 +300,7 @@ def note( offset=offset, allow_dups=allow_dups, code=code, + secondary_context=secondary_context, ) def note_multiline( @@ -293,11 +311,20 @@ def note_multiline( offset: int = 0, allow_dups: bool = False, code: ErrorCode | None = None, + *, + secondary_context: Context | None = None, ) -> None: """Report as many notes as lines in the message (unless disabled).""" for msg in messages.splitlines(): self.report( - msg, context, "note", file=file, offset=offset, allow_dups=allow_dups, code=code + msg, + context, + "note", + file=file, + offset=offset, + allow_dups=allow_dups, + code=code, + secondary_context=secondary_context, ) # @@ -1151,6 +1178,7 @@ def argument_incompatible_with_supertype( arg_type_in_supertype: Type, supertype: str, context: Context, + secondary_context: Context, ) -> None: target = self.override_target(name, name_in_supertype, supertype) arg_type_in_supertype_f = format_type_bare(arg_type_in_supertype) @@ -1161,17 +1189,26 @@ def argument_incompatible_with_supertype( ), context, code=codes.OVERRIDE, + secondary_context=secondary_context, + ) + self.note( + "This violates the Liskov substitution principle", + context, + code=codes.OVERRIDE, + secondary_context=secondary_context, ) - self.note("This violates the Liskov substitution principle", context, code=codes.OVERRIDE) self.note( "See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides", context, code=codes.OVERRIDE, + secondary_context=secondary_context, ) if name == "__eq__" and type_name: multiline_msg = self.comparison_method_example_msg(class_name=type_name) - self.note_multiline(multiline_msg, context, code=codes.OVERRIDE) + self.note_multiline( + multiline_msg, context, code=codes.OVERRIDE, secondary_context=secondary_context + ) def comparison_method_example_msg(self, class_name: str) -> str: return dedent( diff --git a/mypy/nodes.py b/mypy/nodes.py index abf0379fd29a..4787930214f3 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -701,18 +701,6 @@ def __init__( def max_fixed_argc(self) -> int: return self.max_pos - def set_line( 
- self, - target: Context | int, - column: int | None = None, - end_line: int | None = None, - end_column: int | None = None, - ) -> None: - super().set_line(target, column, end_line, end_column) - for arg in self.arguments: - # TODO: set arguments line/column to their precise locations. - arg.set_line(self.line, self.column, self.end_line, end_column) - def is_dynamic(self) -> bool: return self.type is None diff --git a/mypy/test/data.py b/mypy/test/data.py index c6f671b2d401..535ebf304784 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -676,8 +676,8 @@ class DataFileCollector(pytest.Collector): parent: DataSuiteCollector @classmethod # We have to fight with pytest here: - def from_parent( # type: ignore[override] - cls, parent: DataSuiteCollector, *, name: str + def from_parent( + cls, parent: DataSuiteCollector, *, name: str # type: ignore[override] ) -> DataFileCollector: collector = super().from_parent(parent, name=name) assert isinstance(collector, DataFileCollector) diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index 98be314b9c27..566bb92d6e18 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -382,10 +382,10 @@ class A(I): def g(self, a: 'A') -> 'A': return A() [out] +main:11: error: Return type "I" of "h" incompatible with return type "A" in supertype "I" main:11: error: Argument 1 of "h" is incompatible with supertype "I"; supertype defines the argument type as "I" main:11: note: This violates the Liskov substitution principle main:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides -main:11: error: Return type "I" of "h" incompatible with return type "A" in supertype "I" -- Accessing abstract members diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index f1af13923fd7..d5fb830487e8 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -331,6 +331,59 @@ main:7: note: This violates the Liskov substitution principle main:7: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:9: error: Return type "object" of "h" incompatible with return type "A" in supertype "A" +[case testMethodOverridingWithIncompatibleTypesOnMultipleLines] +class A: + def f(self, x: int, y: str) -> None: pass +class B(A): + def f( + self, + x: int, + y: bool, + ) -> None: + pass +[out] +main:7: error: Argument 2 of "f" is incompatible with supertype "A"; supertype defines the argument type as "str" +main:7: note: This violates the Liskov substitution principle +main:7: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + +[case testMultiLineMethodOverridingWithIncompatibleTypesIgnorableAtArgument] +class A: + def f(self, x: int, y: str) -> None: pass + +class B(A): + def f( + self, + x: int, + y: bool, # type: ignore[override] + ) -> None: + pass + +[case testMultiLineMethodOverridingWithIncompatibleTypesIgnorableAtDefinition] +class A: + def f(self, x: int, y: str) -> None: pass +class B(A): + def f( # type: ignore[override] + self, + x: int, + y: bool, + ) -> None: + pass + +[case testMultiLineMethodOverridingWithIncompatibleTypesWrongIgnore] +class A: + def f(self, x: int, y: str) -> None: pass +class B(A): + def f( # type: ignore[return-type] + self, + x: int, + y: bool, + ) -> None: + pass +[out] +main:7: error: Argument 2 of "f" is incompatible with supertype "A"; supertype defines the argument type as "str" +main:7: note: This 
violates the Liskov substitution principle +main:7: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + [case testEqMethodsOverridingWithNonObjects] class A: def __eq__(self, other: A) -> bool: pass # Fail @@ -2626,10 +2679,10 @@ class D(A): def __iadd__(self, x: 'A') -> 'B': pass [out] main:6: error: Return type "A" of "__iadd__" incompatible with return type "B" in "__add__" of supertype "A" +main:8: error: Signatures of "__iadd__" and "__add__" are incompatible main:8: error: Argument 1 of "__iadd__" is incompatible with "__add__" of supertype "A"; supertype defines the argument type as "A" main:8: note: This violates the Liskov substitution principle main:8: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides -main:8: error: Signatures of "__iadd__" and "__add__" are incompatible [case testGetattribute] diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test index 6748646b65aa..9691e6565689 100644 --- a/test-data/unit/check-columns.test +++ b/test-data/unit/check-columns.test @@ -238,9 +238,9 @@ if int(): class A: def f(self, x: int) -> None: pass class B(A): - def f(self, x: str) -> None: pass # E:5: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" \ - # N:5: This violates the Liskov substitution principle \ - # N:5: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + def f(self, x: str) -> None: pass # E:17: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" \ + # N:17: This violates the Liskov substitution principle \ + # N:17: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides class C(A): def f(self, x: int) -> int: pass # E:5: Return type "int" of "f" incompatible with return type "None" in supertype "A" class D(A): diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index c72dc3a32bc7..7586c8763d33 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -490,7 +490,7 @@ bar/baz.py:4:5:attr $ dmypy inspect foo.py:10:10 --show definition --include-span 10:1:10:12 -> bar/baz.py:6:1:test $ dmypy inspect foo.py:14:6 --show definition --include-span --include-kind -NameExpr:14:5:14:7 -> foo.py:13:1:arg +NameExpr:14:5:14:7 -> foo.py:13:9:arg MemberExpr:14:5:14:9 -> bar/baz.py:9:5:x, bar/baz.py:11:5:x [file foo.py] diff --git a/test-data/unit/fine-grained-inspect.test b/test-data/unit/fine-grained-inspect.test index 8574477d8272..2c575ec365b1 100644 --- a/test-data/unit/fine-grained-inspect.test +++ b/test-data/unit/fine-grained-inspect.test @@ -189,7 +189,7 @@ def foo(arg: T) -> T: return arg [out] == -foo.py:7:1:arg +foo.py:7:9:arg foo.py:4:5:x [case testInspectTypeVarValuesDef] @@ -219,7 +219,7 @@ class C(Generic[T]): [out] == foo.py:5:5:z, tmp/foo.py:9:5:z -foo.py:12:1:arg +foo.py:12:9:arg foo.py:5:5:z, tmp/foo.py:9:5:z [case testInspectModuleAttrs] @@ -266,4 +266,4 @@ def foo(arg: int) -> int: [out] == -4:12:4:14 -> tmp/foo.py:1:1:arg +4:12:4:14 -> tmp/foo.py:1:9:arg From bcf60ac7b7aba645bdbb2bf803d43c51f9346197 Mon Sep 17 00:00:00 2001 From: Richard Si Date: Thu, 16 Feb 2023 06:40:50 -0500 Subject: [PATCH 278/292] [mypyc] Support __pow__, __rpow__, and __ipow__ dunders (#14616) Unlike every other slot, power slots are ternary. Some special casing had to be done in generate_bin_op_wrapper() to support the third slot argument. 
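For example (adapted from the run-dunders tests added in this commit), native classes can now
overload the power operator:

```python
class C:
    def __init__(self) -> None:
        self.x = 5

    def __pow__(self, y: int) -> int:
        return self.x ** y

    def __rpow__(self, y: int) -> int:
        return self.x ** y + 1

c = C()
assert c ** 3 == 125  # native __pow__
assert 4 ** c == 626  # dispatches to __rpow__
```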
Annoyingly, pow() also has these unique behaviours: - Ternary pow() does NOT fallback to `__rpow__` if `__pow__` returns `NotImplemented` unlike binary ops. - Ternary pow() does NOT try the right operand's `__rpow__` first if it's a subclass of the left operand and redefines `__rpow__` unlike binary ops. Add in the fact it's allowed and common to only define `__(r|i)pow__` to take two arguments (actually mypy won't let you define `__rpow__` to take three arguments) and the patch becomes frustratingly non-trivial. Towards https://github.com/mypyc/mypyc/issues/553. Fixes https://github.com/mypyc/mypyc/issues/907. --- mypyc/codegen/emitclass.py | 6 ++ mypyc/codegen/emitwrapper.py | 105 +++++++++++++++++++++---- mypyc/lib-rt/CPy.h | 1 + mypyc/lib-rt/generic_ops.c | 5 ++ mypyc/primitives/generic_ops.py | 27 +++++-- mypyc/test-data/fixtures/ir.py | 22 ++++++ mypyc/test-data/irbuild-any.test | 25 ++++++ mypyc/test-data/run-dunders.test | 131 +++++++++++++++++++++++++++++++ 8 files changed, 297 insertions(+), 25 deletions(-) diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 15935c3b79f2..a9b51b8ff1a4 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -13,6 +13,7 @@ generate_dunder_wrapper, generate_get_wrapper, generate_hash_wrapper, + generate_ipow_wrapper, generate_len_wrapper, generate_richcompare_wrapper, generate_set_del_item_wrapper, @@ -109,6 +110,11 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: "__ior__": ("nb_inplace_or", generate_dunder_wrapper), "__ixor__": ("nb_inplace_xor", generate_dunder_wrapper), "__imatmul__": ("nb_inplace_matrix_multiply", generate_dunder_wrapper), + # Ternary operations. (yes, really) + # These are special cased in generate_bin_op_wrapper(). + "__pow__": ("nb_power", generate_bin_op_wrapper), + "__rpow__": ("nb_power", generate_bin_op_wrapper), + "__ipow__": ("nb_inplace_power", generate_ipow_wrapper), } AS_ASYNC_SLOT_DEFS: SlotTable = { diff --git a/mypyc/codegen/emitwrapper.py b/mypyc/codegen/emitwrapper.py index 1fa1e8548e07..ed03bb7948cc 100644 --- a/mypyc/codegen/emitwrapper.py +++ b/mypyc/codegen/emitwrapper.py @@ -301,6 +301,32 @@ def generate_dunder_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: return gen.wrapper_name() +def generate_ipow_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generate a wrapper for native __ipow__. + + Since __ipow__ fills a ternary slot, but almost no one defines __ipow__ to take three + arguments, the wrapper needs to tweaked to force it to accept three arguments. + """ + gen = WrapperGenerator(cl, emitter) + gen.set_target(fn) + assert len(fn.args) in (2, 3), "__ipow__ should only take 2 or 3 arguments" + gen.arg_names = ["self", "exp", "mod"] + gen.emit_header() + gen.emit_arg_processing() + handle_third_pow_argument( + fn, + emitter, + gen, + if_unsupported=[ + 'PyErr_SetString(PyExc_TypeError, "__ipow__ takes 2 positional arguments but 3 were given");', + "return NULL;", + ], + ) + gen.emit_call() + gen.finish() + return gen.wrapper_name() + + def generate_bin_op_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: """Generates a wrapper for a native binary dunder method. 
@@ -311,13 +337,16 @@ def generate_bin_op_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: """ gen = WrapperGenerator(cl, emitter) gen.set_target(fn) - gen.arg_names = ["left", "right"] + if fn.name in ("__pow__", "__rpow__"): + gen.arg_names = ["left", "right", "mod"] + else: + gen.arg_names = ["left", "right"] wrapper_name = gen.wrapper_name() gen.emit_header() if fn.name not in reverse_op_methods and fn.name in reverse_op_method_names: # There's only a reverse operator method. - generate_bin_op_reverse_only_wrapper(emitter, gen) + generate_bin_op_reverse_only_wrapper(fn, emitter, gen) else: rmethod = reverse_op_methods[fn.name] fn_rev = cl.get_method(rmethod) @@ -334,6 +363,7 @@ def generate_bin_op_forward_only_wrapper( fn: FuncIR, emitter: Emitter, gen: WrapperGenerator ) -> None: gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail;"]) gen.emit_call(not_implemented_handler="goto typefail;") gen.emit_error_handling() emitter.emit_label("typefail") @@ -352,19 +382,16 @@ def generate_bin_op_forward_only_wrapper( # if not isinstance(other, int): # return NotImplemented # ... - rmethod = reverse_op_methods[fn.name] - emitter.emit_line(f"_Py_IDENTIFIER({rmethod});") - emitter.emit_line( - 'return CPy_CallReverseOpMethod(obj_left, obj_right, "{}", &PyId_{});'.format( - op_methods_to_symbols[fn.name], rmethod - ) - ) + generate_bin_op_reverse_dunder_call(fn, emitter, reverse_op_methods[fn.name]) gen.finish() -def generate_bin_op_reverse_only_wrapper(emitter: Emitter, gen: WrapperGenerator) -> None: +def generate_bin_op_reverse_only_wrapper( + fn: FuncIR, emitter: Emitter, gen: WrapperGenerator +) -> None: gen.arg_names = ["right", "left"] gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail;"]) gen.emit_call() gen.emit_error_handling() emitter.emit_label("typefail") @@ -390,7 +417,14 @@ def generate_bin_op_both_wrappers( ) ) gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) - gen.emit_call(not_implemented_handler="goto typefail;") + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail2;"]) + # Ternary __rpow__ calls aren't a thing so immediately bail + # if ternary __pow__ returns NotImplemented. 
+ if fn.name == "__pow__" and len(fn.args) == 3: + fwd_not_implemented_handler = "goto typefail2;" + else: + fwd_not_implemented_handler = "goto typefail;" + gen.emit_call(not_implemented_handler=fwd_not_implemented_handler) gen.emit_error_handling() emitter.emit_line("}") emitter.emit_label("typefail") @@ -402,15 +436,11 @@ def generate_bin_op_both_wrappers( gen.set_target(fn_rev) gen.arg_names = ["right", "left"] gen.emit_arg_processing(error=GotoHandler("typefail2"), raise_exception=False) + handle_third_pow_argument(fn_rev, emitter, gen, if_unsupported=["goto typefail2;"]) gen.emit_call() gen.emit_error_handling() emitter.emit_line("} else {") - emitter.emit_line(f"_Py_IDENTIFIER({fn_rev.name});") - emitter.emit_line( - 'return CPy_CallReverseOpMethod(obj_left, obj_right, "{}", &PyId_{});'.format( - op_methods_to_symbols[fn.name], fn_rev.name - ) - ) + generate_bin_op_reverse_dunder_call(fn, emitter, fn_rev.name) emitter.emit_line("}") emitter.emit_label("typefail2") emitter.emit_line("Py_INCREF(Py_NotImplemented);") @@ -418,6 +448,47 @@ def generate_bin_op_both_wrappers( gen.finish() +def generate_bin_op_reverse_dunder_call(fn: FuncIR, emitter: Emitter, rmethod: str) -> None: + if fn.name in ("__pow__", "__rpow__"): + # Ternary pow() will never call the reverse dunder. + emitter.emit_line("if (obj_mod == Py_None) {") + emitter.emit_line(f"_Py_IDENTIFIER({rmethod});") + emitter.emit_line( + 'return CPy_CallReverseOpMethod(obj_left, obj_right, "{}", &PyId_{});'.format( + op_methods_to_symbols[fn.name], rmethod + ) + ) + if fn.name in ("__pow__", "__rpow__"): + emitter.emit_line("} else {") + emitter.emit_line("Py_INCREF(Py_NotImplemented);") + emitter.emit_line("return Py_NotImplemented;") + emitter.emit_line("}") + + +def handle_third_pow_argument( + fn: FuncIR, emitter: Emitter, gen: WrapperGenerator, *, if_unsupported: list[str] +) -> None: + if fn.name not in ("__pow__", "__rpow__", "__ipow__"): + return + + if (fn.name in ("__pow__", "__ipow__") and len(fn.args) == 2) or fn.name == "__rpow__": + # If the power dunder only supports two arguments and the third + # argument (AKA mod) is set to a non-default value, simply bail. + # + # Importantly, this prevents any ternary __rpow__ calls from + # happening (as per the language specification). + emitter.emit_line("if (obj_mod != Py_None) {") + for line in if_unsupported: + emitter.emit_line(line) + emitter.emit_line("}") + # The slot wrapper will receive three arguments, but the call only + # supports two so make sure that the third argument isn't passed + # along. This is needed as two-argument __(i)pow__ is allowed and + # rather common. 
+ if len(gen.arg_names) == 3: + gen.arg_names.pop() + + RICHCOMPARE_OPS = { "__lt__": "Py_LT", "__gt__": "Py_GT", diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index befa397051ef..016a6d3ea9e0 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -344,6 +344,7 @@ CPyTagged CPyObject_Hash(PyObject *o); PyObject *CPyObject_GetAttr3(PyObject *v, PyObject *name, PyObject *defl); PyObject *CPyIter_Next(PyObject *iter); PyObject *CPyNumber_Power(PyObject *base, PyObject *index); +PyObject *CPyNumber_InPlacePower(PyObject *base, PyObject *index); PyObject *CPyObject_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); diff --git a/mypyc/lib-rt/generic_ops.c b/mypyc/lib-rt/generic_ops.c index 2f4a7941a6da..260cfec5b360 100644 --- a/mypyc/lib-rt/generic_ops.c +++ b/mypyc/lib-rt/generic_ops.c @@ -41,6 +41,11 @@ PyObject *CPyNumber_Power(PyObject *base, PyObject *index) return PyNumber_Power(base, index, Py_None); } +PyObject *CPyNumber_InPlacePower(PyObject *base, PyObject *index) +{ + return PyNumber_InPlacePower(base, index, Py_None); +} + PyObject *CPyObject_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { PyObject *start_obj = CPyTagged_AsObject(start); PyObject *end_obj = CPyTagged_AsObject(end); diff --git a/mypyc/primitives/generic_ops.py b/mypyc/primitives/generic_ops.py index 4f04608d11f3..3caec0a9875e 100644 --- a/mypyc/primitives/generic_ops.py +++ b/mypyc/primitives/generic_ops.py @@ -109,14 +109,25 @@ priority=0, ) -binary_op( - name="**", - arg_types=[object_rprimitive, object_rprimitive], - return_type=object_rprimitive, - error_kind=ERR_MAGIC, - c_function_name="CPyNumber_Power", - priority=0, -) +for op, c_function in (("**", "CPyNumber_Power"), ("**=", "CPyNumber_InPlacePower")): + binary_op( + name=op, + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + error_kind=ERR_MAGIC, + c_function_name=c_function, + priority=0, + ) + +for arg_count, c_function in ((2, "CPyNumber_Power"), (3, "PyNumber_Power")): + function_op( + name="builtins.pow", + arg_types=[object_rprimitive] * arg_count, + return_type=object_rprimitive, + error_kind=ERR_MAGIC, + c_function_name=c_function, + priority=0, + ) binary_op( name="in", diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index 37aab1d826d7..27e225f273bc 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -22,6 +22,21 @@ def __divmod__(self, other: T_contra) -> T_co: ... class __SupportsRDivMod(Protocol[T_contra, T_co]): def __rdivmod__(self, other: T_contra) -> T_co: ... +_M = TypeVar("_M", contravariant=True) + +class __SupportsPow2(Protocol[T_contra, T_co]): + def __pow__(self, other: T_contra) -> T_co: ... + +class __SupportsPow3NoneOnly(Protocol[T_contra, T_co]): + def __pow__(self, other: T_contra, modulo: None = ...) -> T_co: ... + +class __SupportsPow3(Protocol[T_contra, _M, T_co]): + def __pow__(self, other: T_contra, modulo: _M) -> T_co: ... 
+ +__SupportsSomeKindOfPow = Union[ + __SupportsPow2[Any, Any], __SupportsPow3NoneOnly[Any, Any] | __SupportsPow3[Any, Any, Any] +] + class object: def __init__(self) -> None: pass def __eq__(self, x: object) -> bool: pass @@ -99,6 +114,7 @@ def __add__(self, n: float) -> float: pass def __sub__(self, n: float) -> float: pass def __mul__(self, n: float) -> float: pass def __truediv__(self, n: float) -> float: pass + def __pow__(self, n: float) -> float: pass def __neg__(self) -> float: pass def __pos__(self) -> float: pass def __abs__(self) -> float: pass @@ -318,6 +334,12 @@ def abs(x: __SupportsAbs[T]) -> T: ... def divmod(x: __SupportsDivMod[T_contra, T_co], y: T_contra) -> T_co: ... @overload def divmod(x: T_contra, y: __SupportsRDivMod[T_contra, T_co]) -> T_co: ... +@overload +def pow(base: __SupportsPow2[T_contra, T_co], exp: T_contra, mod: None = None) -> T_co: ... +@overload +def pow(base: __SupportsPow3NoneOnly[T_contra, T_co], exp: T_contra, mod: None = None) -> T_co: ... +@overload +def pow(base: __SupportsPow3[T_contra, _M, T_co], exp: T_contra, mod: _M) -> T_co: ... def exit() -> None: ... def min(x: T, y: T) -> T: ... def max(x: T, y: T) -> T: ... diff --git a/mypyc/test-data/irbuild-any.test b/mypyc/test-data/irbuild-any.test index 8cc626100262..8d4e085179ae 100644 --- a/mypyc/test-data/irbuild-any.test +++ b/mypyc/test-data/irbuild-any.test @@ -201,6 +201,10 @@ L0: [case testFunctionBasedOps] def f() -> None: a = divmod(5, 2) +def f2() -> int: + return pow(2, 5) +def f3() -> float: + return pow(2, 5, 3) [out] def f(): r0, r1, r2 :: object @@ -212,4 +216,25 @@ L0: r3 = unbox(tuple[float, float], r2) a = r3 return 1 +def f2(): + r0, r1, r2 :: object + r3 :: int +L0: + r0 = object 2 + r1 = object 5 + r2 = CPyNumber_Power(r0, r1) + r3 = unbox(int, r2) + return r3 +def f3(): + r0, r1, r2, r3 :: object + r4 :: int + r5 :: object +L0: + r0 = object 2 + r1 = object 5 + r2 = object 3 + r3 = PyNumber_Power(r0, r1, r2) + r4 = unbox(int, r3) + r5 = box(int, r4) + return r5 diff --git a/mypyc/test-data/run-dunders.test b/mypyc/test-data/run-dunders.test index 23323c7244de..2845187de2c3 100644 --- a/mypyc/test-data/run-dunders.test +++ b/mypyc/test-data/run-dunders.test @@ -405,6 +405,9 @@ class C: def __divmod__(self, y: int) -> int: return self.x + y + 40 + def __pow__(self, y: int) -> int: + return self.x + y + 50 + def test_generic() -> None: a: Any = C() assert a + 3 == 8 @@ -421,12 +424,14 @@ def test_generic() -> None: assert a / 2 == 27 assert a // 2 == 37 assert divmod(a, 2) == 47 + assert a ** 2 == 57 def test_native() -> None: c = C() assert c + 3 == 8 assert c - 3 == 2 assert divmod(c, 3) == 48 + assert c ** 3 == 58 def test_error() -> None: a: Any = C() @@ -442,6 +447,12 @@ def test_error() -> None: assert str(e) == "unsupported operand type(s) for -: 'C' and 'str'" else: assert False + try: + a ** 'x' + except TypeError as e: + assert str(e) == "unsupported operand type(s) for **: 'C' and 'str'" + else: + assert False [case testDundersBinaryReverse] from typing import Any @@ -462,12 +473,20 @@ class C: def __rsub__(self, y: int) -> int: return self.x - y - 1 + def __pow__(self, y: int) -> int: + return self.x**y + + def __rpow__(self, y: int) -> int: + return self.x**y + 1 + def test_generic() -> None: a: Any = C() assert a + 3 == 8 assert 4 + a == 10 assert a - 3 == 2 assert 4 - a == 0 + assert a**3 == 125 + assert 4**a == 626 def test_native() -> None: c = C() @@ -475,6 +494,8 @@ def test_native() -> None: assert 4 + c == 10 assert c - 3 == 2 assert 4 - c == 0 + assert 
c**3 == 125 + assert 4**c == 626 def test_errors() -> None: a: Any = C() @@ -497,20 +518,37 @@ def test_errors() -> None: 'must be str, not C') else: assert False + try: + 'x' ** a + except TypeError as e: + assert str(e) == "unsupported operand type(s) for ** or pow(): 'str' and 'C'" + else: + assert False + class F: def __add__(self, x: int) -> int: return 5 + def __pow__(self, x: int) -> int: + return -5 + class G: def __add__(self, x: int) -> int: return 33 + def __pow__(self, x: int) -> int: + return -33 + def __radd__(self, x: F) -> int: return 6 + def __rpow__(self, x: F) -> int: + return -6 + def test_type_mismatch_fall_back_to_reverse() -> None: assert F() + G() == 6 + assert F()**G() == -6 [case testDundersBinaryNotImplemented] from typing import Any, Union @@ -718,6 +756,10 @@ class C: self.x += y + 5 return self + def __ipow__(self, y: int, __mod_throwaway: None = None) -> C: + self.x **= y + return self + def test_generic_1() -> None: c: Any = C() c += 3 @@ -732,6 +774,8 @@ def test_generic_1() -> None: assert c.x == 16 c //= 4 assert c.x == 40 + c **= 2 + assert c.x == 1600 def test_generic_2() -> None: c: Any = C() @@ -756,6 +800,8 @@ def test_native() -> None: assert c.x == 3 c *= 3 assert c.x == 9 + c **= 2 + assert c.x == 81 def test_error() -> None: c: Any = C() @@ -812,3 +858,88 @@ def test_dunder_min() -> None: assert max(y2, x2).val == 'xxx' assert min(y2, z2).val == 'zzz' assert max(x2, z2).val == 'zzz' + + +[case testDundersPowerSpecial] +import sys +from typing import Any, Optional +from testutil import assertRaises + +class Forward: + def __pow__(self, exp: int, mod: Optional[int] = None) -> int: + if mod is None: + return 2**exp + else: + return 2**exp % mod + +class ForwardModRequired: + def __pow__(self, exp: int, mod: int) -> int: + return 2**exp % mod + +class ForwardNotImplemented: + def __pow__(self, exp: int, mod: Optional[object] = None) -> Any: + return NotImplemented + +class Reverse: + def __rpow__(self, exp: int) -> int: + return 2**exp + 1 + +class Both: + def __pow__(self, exp: int, mod: Optional[int] = None) -> int: + if mod is None: + return 2**exp + else: + return 2**exp % mod + + def __rpow__(self, exp: int) -> int: + return 2**exp + 1 + +class Child(ForwardNotImplemented): + def __rpow__(self, exp: object) -> int: + return 50 + +class Inplace: + value = 2 + + def __ipow__(self, exp: int, mod: Optional[int] = None) -> "Inplace": + self.value **= exp - (mod or 0) + return self + +def test_native() -> None: + f = Forward() + assert f**3 == 8 + assert pow(f, 3) == 8 + assert pow(f, 3, 3) == 2 + assert pow(ForwardModRequired(), 3, 3) == 2 + b = Both() + assert b**3 == 8 + assert 3**b == 9 + assert pow(b, 3) == 8 + assert pow(b, 3, 3) == 2 + i = Inplace() + i **= 2 + assert i.value == 4 + +def test_errors() -> None: + if sys.version_info[0] >= 3 and sys.version_info[1] >= 10: + op = "** or pow()" + else: + op = "pow()" + + f = Forward() + with assertRaises(TypeError, f"unsupported operand type(s) for {op}: 'Forward', 'int', 'str'"): + pow(f, 3, "x") # type: ignore + with assertRaises(TypeError, "unsupported operand type(s) for **: 'Forward' and 'str'"): + f**"x" # type: ignore + r = Reverse() + with assertRaises(TypeError, "unsupported operand type(s) for ** or pow(): 'str' and 'Reverse'"): + "x"**r # type: ignore + with assertRaises(TypeError, f"unsupported operand type(s) for {op}: 'int', 'Reverse', 'int'"): + # Ternary pow() does not fallback to __rpow__ if LHS's __pow__ returns NotImplemented. 
+ pow(3, r, 3) # type: ignore + with assertRaises(TypeError, f"unsupported operand type(s) for {op}: 'ForwardNotImplemented', 'Child', 'int'"): + # Ternary pow() does not try RHS's __rpow__ first when it's a subclass and redefines + # __rpow__ unlike other ops. + pow(ForwardNotImplemented(), Child(), 3) # type: ignore + with assertRaises(TypeError, "unsupported operand type(s) for ** or pow(): 'ForwardModRequired' and 'int'"): + ForwardModRequired()**3 # type: ignore From 584a5d93d2bb1dc2df40e641049da62e276fcd2c Mon Sep 17 00:00:00 2001 From: Max Murin Date: Thu, 16 Feb 2023 16:26:25 -0800 Subject: [PATCH 279/292] Sync typeshed Source commit: https://github.com/python/typeshed/commit/75cd3022154f3456e59711865d6539af38fb3aae --- mypy/typeshed/stdlib/_codecs.pyi | 26 +- mypy/typeshed/stdlib/_csv.pyi | 8 +- mypy/typeshed/stdlib/_decimal.pyi | 23 +- mypy/typeshed/stdlib/_heapq.pyi | 3 +- mypy/typeshed/stdlib/_py_abc.pyi | 6 +- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 12 +- mypy/typeshed/stdlib/_weakref.pyi | 5 +- mypy/typeshed/stdlib/_weakrefset.pyi | 20 +- mypy/typeshed/stdlib/_winapi.pyi | 112 ++++---- mypy/typeshed/stdlib/abc.pyi | 11 +- mypy/typeshed/stdlib/aifc.pyi | 7 +- mypy/typeshed/stdlib/array.pyi | 8 +- mypy/typeshed/stdlib/asyncio/events.pyi | 26 +- mypy/typeshed/stdlib/asyncio/futures.pyi | 9 +- mypy/typeshed/stdlib/asyncio/locks.pyi | 6 +- mypy/typeshed/stdlib/asyncio/runners.pyi | 6 +- mypy/typeshed/stdlib/asyncio/streams.pyi | 6 +- mypy/typeshed/stdlib/asyncio/subprocess.pyi | 60 ++--- mypy/typeshed/stdlib/asyncio/taskgroups.pyi | 4 +- mypy/typeshed/stdlib/asyncio/timeouts.pyi | 5 +- mypy/typeshed/stdlib/asyncio/unix_events.pyi | 15 +- .../typeshed/stdlib/asyncio/windows_utils.pyi | 7 +- mypy/typeshed/stdlib/builtins.pyi | 244 ++++++++++++------ mypy/typeshed/stdlib/bz2.pyi | 6 +- mypy/typeshed/stdlib/cProfile.pyi | 10 +- mypy/typeshed/stdlib/cgi.pyi | 5 +- mypy/typeshed/stdlib/cgitb.pyi | 3 +- mypy/typeshed/stdlib/codecs.pyi | 20 +- mypy/typeshed/stdlib/collections/__init__.pyi | 150 +++++------ .../stdlib/concurrent/futures/_base.pyi | 6 +- mypy/typeshed/stdlib/contextlib.pyi | 12 +- mypy/typeshed/stdlib/copyreg.pyi | 4 +- mypy/typeshed/stdlib/csv.pyi | 6 +- mypy/typeshed/stdlib/ctypes/__init__.pyi | 24 +- mypy/typeshed/stdlib/dataclasses.pyi | 32 ++- mypy/typeshed/stdlib/datetime.pyi | 55 ++-- mypy/typeshed/stdlib/dbm/__init__.pyi | 5 +- mypy/typeshed/stdlib/dbm/dumb.pyi | 5 +- mypy/typeshed/stdlib/dbm/gnu.pyi | 6 +- mypy/typeshed/stdlib/dbm/ndbm.pyi | 6 +- mypy/typeshed/stdlib/dis.pyi | 9 +- mypy/typeshed/stdlib/distutils/ccompiler.pyi | 4 +- .../stdlib/distutils/command/check.pyi | 2 + mypy/typeshed/stdlib/distutils/version.pyi | 22 +- mypy/typeshed/stdlib/email/__init__.pyi | 6 +- .../stdlib/email/_header_value_parser.pyi | 5 +- mypy/typeshed/stdlib/email/headerregistry.pyi | 5 +- mypy/typeshed/stdlib/email/message.pyi | 5 +- mypy/typeshed/stdlib/email/parser.pyi | 11 +- mypy/typeshed/stdlib/enum.pyi | 55 ++-- mypy/typeshed/stdlib/fileinput.pyi | 8 +- mypy/typeshed/stdlib/fractions.pyi | 15 +- mypy/typeshed/stdlib/ftplib.pyi | 6 +- mypy/typeshed/stdlib/functools.pyi | 10 +- mypy/typeshed/stdlib/hashlib.pyi | 6 +- mypy/typeshed/stdlib/heapq.pyi | 3 +- mypy/typeshed/stdlib/http/client.pyi | 6 +- mypy/typeshed/stdlib/imaplib.pyi | 6 +- mypy/typeshed/stdlib/importlib/abc.pyi | 22 +- .../stdlib/importlib/metadata/__init__.pyi | 11 +- mypy/typeshed/stdlib/inspect.pyi | 25 +- mypy/typeshed/stdlib/io.pyi | 18 +- mypy/typeshed/stdlib/ipaddress.pyi | 47 ++-- 
mypy/typeshed/stdlib/itertools.pyi | 47 ++-- mypy/typeshed/stdlib/keyword.pyi | 10 +- .../typeshed/stdlib/lib2to3/pgen2/grammar.pyi | 6 +- mypy/typeshed/stdlib/lib2to3/pytree.pyi | 7 +- mypy/typeshed/stdlib/logging/__init__.pyi | 10 +- mypy/typeshed/stdlib/lzma.pyi | 6 +- mypy/typeshed/stdlib/mailbox.pyi | 6 +- mypy/typeshed/stdlib/marshal.pyi | 38 +-- mypy/typeshed/stdlib/mmap.pyi | 5 +- .../stdlib/multiprocessing/connection.pyi | 12 +- .../multiprocessing/dummy/connection.pyi | 11 +- .../stdlib/multiprocessing/managers.pyi | 12 +- mypy/typeshed/stdlib/multiprocessing/pool.pyi | 7 +- .../stdlib/multiprocessing/shared_memory.pyi | 4 +- mypy/typeshed/stdlib/nntplib.pyi | 6 +- mypy/typeshed/stdlib/os/__init__.pyi | 31 ++- mypy/typeshed/stdlib/pathlib.pyi | 51 ++-- mypy/typeshed/stdlib/pdb.pyi | 5 +- mypy/typeshed/stdlib/pickle.pyi | 16 +- mypy/typeshed/stdlib/plistlib.pyi | 5 +- mypy/typeshed/stdlib/profile.pyi | 8 +- mypy/typeshed/stdlib/pstats.pyi | 22 +- mypy/typeshed/stdlib/pydoc.pyi | 10 +- mypy/typeshed/stdlib/quopri.pyi | 8 +- mypy/typeshed/stdlib/re.pyi | 2 +- mypy/typeshed/stdlib/runpy.pyi | 5 +- mypy/typeshed/stdlib/select.pyi | 6 +- mypy/typeshed/stdlib/selectors.pyi | 6 +- mypy/typeshed/stdlib/shelve.pyi | 4 +- mypy/typeshed/stdlib/shlex.pyi | 4 +- mypy/typeshed/stdlib/signal.pyi | 8 +- mypy/typeshed/stdlib/smtplib.pyi | 8 +- mypy/typeshed/stdlib/socket.pyi | 8 +- mypy/typeshed/stdlib/socketserver.pyi | 22 +- mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 16 +- mypy/typeshed/stdlib/sre_constants.pyi | 4 +- mypy/typeshed/stdlib/ssl.pyi | 16 +- mypy/typeshed/stdlib/statistics.pyi | 6 +- mypy/typeshed/stdlib/subprocess.pyi | 46 ++-- mypy/typeshed/stdlib/sunau.pyi | 8 +- mypy/typeshed/stdlib/sys.pyi | 14 +- mypy/typeshed/stdlib/tarfile.pyi | 26 +- mypy/typeshed/stdlib/telnetlib.pyi | 4 +- mypy/typeshed/stdlib/tempfile.pyi | 8 +- mypy/typeshed/stdlib/tkinter/__init__.pyi | 8 +- mypy/typeshed/stdlib/tkinter/ttk.pyi | 16 +- mypy/typeshed/stdlib/traceback.pyi | 10 +- mypy/typeshed/stdlib/tracemalloc.pyi | 4 +- mypy/typeshed/stdlib/turtle.pyi | 13 +- mypy/typeshed/stdlib/types.pyi | 6 +- mypy/typeshed/stdlib/typing.pyi | 24 +- mypy/typeshed/stdlib/typing_extensions.pyi | 13 +- mypy/typeshed/stdlib/unicodedata.pyi | 44 +++- mypy/typeshed/stdlib/unittest/case.pyi | 27 +- mypy/typeshed/stdlib/unittest/mock.pyi | 20 +- mypy/typeshed/stdlib/urllib/error.pyi | 6 +- mypy/typeshed/stdlib/urllib/response.pyi | 7 +- mypy/typeshed/stdlib/uu.pyi | 2 +- mypy/typeshed/stdlib/wave.pyi | 8 +- mypy/typeshed/stdlib/weakref.pyi | 20 +- mypy/typeshed/stdlib/webbrowser.pyi | 5 + mypy/typeshed/stdlib/winreg.pyi | 5 +- mypy/typeshed/stdlib/xml/dom/minidom.pyi | 8 +- mypy/typeshed/stdlib/xml/sax/__init__.pyi | 15 +- mypy/typeshed/stdlib/xmlrpc/client.pyi | 10 +- mypy/typeshed/stdlib/zipfile.pyi | 12 +- mypy/typeshed/stdlib/zoneinfo/__init__.pyi | 7 +- 130 files changed, 1135 insertions(+), 1010 deletions(-) diff --git a/mypy/typeshed/stdlib/_codecs.pyi b/mypy/typeshed/stdlib/_codecs.pyi index 44cc0f78028c..51f17f01ca71 100644 --- a/mypy/typeshed/stdlib/_codecs.pyi +++ b/mypy/typeshed/stdlib/_codecs.pyi @@ -104,35 +104,35 @@ if sys.version_info < (3, 8): def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... 
+def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_16_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_16_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_16_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_16_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... def utf_16_ex_decode( - __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: int = False + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: bool = False ) -> tuple[str, int, int]: ... -def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_16_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_32_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_32_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_32_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_32_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... def utf_32_ex_decode( - __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: int = False + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: bool = False ) -> tuple[str, int, int]: ... -def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_32_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_7_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_7_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_7_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_8_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... +def utf_8_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def utf_8_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.platform == "win32": - def mbcs_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... + def mbcs_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def mbcs_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... 
def code_page_decode( - __codepage: int, __data: ReadableBuffer, __errors: str | None = None, __final: int = False + __codepage: int, __data: ReadableBuffer, __errors: str | None = None, __final: bool = False ) -> tuple[str, int]: ... def code_page_encode(__code_page: int, __str: str, __errors: str | None = None) -> tuple[bytes, int]: ... - def oem_decode(__data: ReadableBuffer, __errors: str | None = None, __final: int = False) -> tuple[str, int]: ... + def oem_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def oem_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... diff --git a/mypy/typeshed/stdlib/_csv.pyi b/mypy/typeshed/stdlib/_csv.pyi index 7d15365d3b02..7e9b9e4e7a79 100644 --- a/mypy/typeshed/stdlib/_csv.pyi +++ b/mypy/typeshed/stdlib/_csv.pyi @@ -1,9 +1,9 @@ from _typeshed import SupportsWrite from collections.abc import Iterable, Iterator -from typing import Any, Union -from typing_extensions import Literal, TypeAlias +from typing import Any +from typing_extensions import Final, Literal, TypeAlias -__version__: str +__version__: Final[str] QUOTE_ALL: Literal[1] QUOTE_MINIMAL: Literal[0] @@ -27,7 +27,7 @@ class Dialect: strict: bool def __init__(self) -> None: ... -_DialectLike: TypeAlias = Union[str, Dialect, type[Dialect]] +_DialectLike: TypeAlias = str | Dialect | type[Dialect] class _reader(Iterator[list[str]]): @property diff --git a/mypy/typeshed/stdlib/_decimal.pyi b/mypy/typeshed/stdlib/_decimal.pyi index 38b8ac30cc2f..b8208fe180a1 100644 --- a/mypy/typeshed/stdlib/_decimal.pyi +++ b/mypy/typeshed/stdlib/_decimal.pyi @@ -1,17 +1,16 @@ import numbers import sys -from _typeshed import Self from collections.abc import Container, Sequence from types import TracebackType -from typing import Any, ClassVar, NamedTuple, Union, overload -from typing_extensions import Literal, TypeAlias +from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Final, Literal, Self, TypeAlias _Decimal: TypeAlias = Decimal | int -_DecimalNew: TypeAlias = Union[Decimal, float, str, tuple[int, Sequence[int], int]] +_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] _ComparableNum: TypeAlias = Decimal | float | numbers.Rational -__version__: str -__libmpdec_version__: str +__version__: Final[str] +__libmpdec_version__: Final[str] class DecimalTuple(NamedTuple): sign: int @@ -69,9 +68,9 @@ else: def localcontext(ctx: Context | None = None) -> _ContextManager: ... class Decimal: - def __new__(cls: type[Self], value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... + def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... @classmethod - def from_float(cls: type[Self], __f: float) -> Self: ... + def from_float(cls, __f: float) -> Self: ... def __bool__(self) -> bool: ... def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def as_tuple(self) -> DecimalTuple: ... @@ -163,9 +162,9 @@ class Decimal: def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[str]]: ... - def __copy__(self: Self) -> Self: ... - def __deepcopy__(self: Self, __memo: Any) -> Self: ... + def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... + def __copy__(self) -> Self: ... 
+ def __deepcopy__(self, __memo: Any) -> Self: ... def __format__(self, __specifier: str, __context: Context | None = ...) -> str: ... class _ContextManager: @@ -203,7 +202,7 @@ class Context: traps: None | dict[_TrapType, bool] | Container[_TrapType] = ..., _ignored_flags: list[_TrapType] | None = ..., ) -> None: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[Any, ...]]: ... + def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... def clear_flags(self) -> None: ... def clear_traps(self) -> None: ... def copy(self) -> Context: ... diff --git a/mypy/typeshed/stdlib/_heapq.pyi b/mypy/typeshed/stdlib/_heapq.pyi index 90dc28deb71f..8d6c3e88103e 100644 --- a/mypy/typeshed/stdlib/_heapq.pyi +++ b/mypy/typeshed/stdlib/_heapq.pyi @@ -1,8 +1,9 @@ from typing import Any, TypeVar +from typing_extensions import Final _T = TypeVar("_T") -__about__: str +__about__: Final[str] def heapify(__heap: list[Any]) -> None: ... def heappop(__heap: list[_T]) -> _T: ... diff --git a/mypy/typeshed/stdlib/_py_abc.pyi b/mypy/typeshed/stdlib/_py_abc.pyi index ddf04364a238..cc45c6ad3814 100644 --- a/mypy/typeshed/stdlib/_py_abc.pyi +++ b/mypy/typeshed/stdlib/_py_abc.pyi @@ -1,4 +1,4 @@ -from _typeshed import Self +import _typeshed from typing import Any, NewType, TypeVar _T = TypeVar("_T") @@ -8,5 +8,7 @@ _CacheToken = NewType("_CacheToken", int) def get_cache_token() -> _CacheToken: ... class ABCMeta(type): - def __new__(__mcls: type[Self], __name: str, __bases: tuple[type[Any], ...], __namespace: dict[str, Any]) -> Self: ... + def __new__( + __mcls: type[_typeshed.Self], __name: str, __bases: tuple[type[Any], ...], __namespace: dict[str, Any] + ) -> _typeshed.Self: ... def register(cls, subclass: type[_T]) -> type[_T]: ... diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index 68ac2a9b1900..d0c6b3ab1173 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -8,9 +8,10 @@ import mmap import pickle import sys from collections.abc import Awaitable, Callable, Iterable, Set as AbstractSet +from dataclasses import Field from os import PathLike from types import FrameType, TracebackType -from typing import Any, AnyStr, Generic, Protocol, TypeVar, Union +from typing import Any, AnyStr, ClassVar, Generic, Protocol, TypeVar from typing_extensions import Final, Literal, LiteralString, TypeAlias, final _KT = TypeVar("_KT") @@ -264,7 +265,7 @@ IndexableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | # def __buffer__(self, __flags: int) -> memoryview: ... 
ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] -OptExcInfo: TypeAlias = Union[ExcInfo, tuple[None, None, None]] +OptExcInfo: TypeAlias = ExcInfo | tuple[None, None, None] # stable if sys.version_info >= (3, 10): @@ -304,3 +305,10 @@ ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] # Objects suitable to be passed to sys.settrace, threading.settrace, and similar TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] + +# experimental +# Might not work as expected for pyright, see +# https://github.com/python/typeshed/pull/9362 +# https://github.com/microsoft/pyright/issues/4339 +class DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi index df462ad859c7..2a43de3ffd6b 100644 --- a/mypy/typeshed/stdlib/_weakref.pyi +++ b/mypy/typeshed/stdlib/_weakref.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Callable from typing import Any, Generic, TypeVar, overload -from typing_extensions import final +from typing_extensions import Self, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -21,7 +20,7 @@ class ProxyType(Generic[_T]): # "weakproxy" class ReferenceType(Generic[_T]): __callback__: Callable[[ReferenceType[_T]], Any] - def __new__(cls: type[Self], o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ... + def __new__(cls, o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ... def __call__(self) -> _T | None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/mypy/typeshed/stdlib/_weakrefset.pyi b/mypy/typeshed/stdlib/_weakrefset.pyi index fdf26641bbeb..d73d79155329 100644 --- a/mypy/typeshed/stdlib/_weakrefset.pyi +++ b/mypy/typeshed/stdlib/_weakrefset.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Iterable, Iterator, MutableSet from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -18,21 +18,21 @@ class WeakSet(MutableSet[_T], Generic[_T]): def __init__(self, data: Iterable[_T]) -> None: ... def add(self, item: _T) -> None: ... def discard(self, item: _T) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def remove(self, item: _T) -> None: ... def update(self, other: Iterable[_T]) -> None: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... - def __ior__(self: Self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] - def difference(self: Self, other: Iterable[_T]) -> Self: ... - def __sub__(self: Self, other: Iterable[Any]) -> Self: ... + def __ior__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def difference(self, other: Iterable[_T]) -> Self: ... + def __sub__(self, other: Iterable[Any]) -> Self: ... def difference_update(self, other: Iterable[Any]) -> None: ... - def __isub__(self: Self, other: Iterable[Any]) -> Self: ... - def intersection(self: Self, other: Iterable[_T]) -> Self: ... - def __and__(self: Self, other: Iterable[Any]) -> Self: ... + def __isub__(self, other: Iterable[Any]) -> Self: ... + def intersection(self, other: Iterable[_T]) -> Self: ... + def __and__(self, other: Iterable[Any]) -> Self: ... 
def intersection_update(self, other: Iterable[Any]) -> None: ... - def __iand__(self: Self, other: Iterable[Any]) -> Self: ... + def __iand__(self, other: Iterable[Any]) -> Self: ... def issubset(self, other: Iterable[_T]) -> bool: ... def __le__(self, other: Iterable[_T]) -> bool: ... def __lt__(self, other: Iterable[_T]) -> bool: ... @@ -43,7 +43,7 @@ class WeakSet(MutableSet[_T], Generic[_T]): def symmetric_difference(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __xor__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def symmetric_difference_update(self, other: Iterable[_T]) -> None: ... - def __ixor__(self: Self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def __ixor__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index 5e0087e29934..e21402b801c5 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -5,13 +5,15 @@ from typing import Any, NoReturn, overload from typing_extensions import Literal, final if sys.platform == "win32": - ABOVE_NORMAL_PRIORITY_CLASS: Literal[32768] - BELOW_NORMAL_PRIORITY_CLASS: Literal[16384] - CREATE_BREAKAWAY_FROM_JOB: Literal[16777216] - CREATE_DEFAULT_ERROR_MODE: Literal[67108864] - CREATE_NO_WINDOW: Literal[134217728] - CREATE_NEW_CONSOLE: Literal[16] - CREATE_NEW_PROCESS_GROUP: Literal[512] + ABOVE_NORMAL_PRIORITY_CLASS: Literal[0x8000] + BELOW_NORMAL_PRIORITY_CLASS: Literal[0x4000] + + CREATE_BREAKAWAY_FROM_JOB: Literal[0x1000000] + CREATE_DEFAULT_ERROR_MODE: Literal[0x4000000] + CREATE_NO_WINDOW: Literal[0x8000000] + CREATE_NEW_CONSOLE: Literal[0x10] + CREATE_NEW_PROCESS_GROUP: Literal[0x200] + DETACHED_PROCESS: Literal[8] DUPLICATE_CLOSE_SOURCE: Literal[1] DUPLICATE_SAME_ACCESS: Literal[2] @@ -28,40 +30,43 @@ if sys.platform == "win32": ERROR_PIPE_CONNECTED: Literal[535] ERROR_SEM_TIMEOUT: Literal[121] - FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[524288] - FILE_FLAG_OVERLAPPED: Literal[1073741824] + FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[0x80000] + FILE_FLAG_OVERLAPPED: Literal[0x40000000] + FILE_GENERIC_READ: Literal[1179785] FILE_GENERIC_WRITE: Literal[1179926] + if sys.version_info >= (3, 8): FILE_MAP_ALL_ACCESS: Literal[983071] FILE_MAP_COPY: Literal[1] FILE_MAP_EXECUTE: Literal[32] FILE_MAP_READ: Literal[4] FILE_MAP_WRITE: Literal[2] + FILE_TYPE_CHAR: Literal[2] FILE_TYPE_DISK: Literal[1] FILE_TYPE_PIPE: Literal[3] FILE_TYPE_REMOTE: Literal[32768] FILE_TYPE_UNKNOWN: Literal[0] - GENERIC_READ: Literal[2147483648] - GENERIC_WRITE: Literal[1073741824] - HIGH_PRIORITY_CLASS: Literal[128] - INFINITE: Literal[4294967295] + GENERIC_READ: Literal[0x80000000] + GENERIC_WRITE: Literal[0x40000000] + HIGH_PRIORITY_CLASS: Literal[0x80] + INFINITE: Literal[0xFFFFFFFF] if sys.version_info >= (3, 8): - INVALID_HANDLE_VALUE: int # very large number - IDLE_PRIORITY_CLASS: Literal[64] - NORMAL_PRIORITY_CLASS: Literal[32] - REALTIME_PRIORITY_CLASS: Literal[256] - NMPWAIT_WAIT_FOREVER: Literal[4294967295] + INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF] + IDLE_PRIORITY_CLASS: Literal[0x40] + NORMAL_PRIORITY_CLASS: Literal[0x20] + REALTIME_PRIORITY_CLASS: Literal[0x100] + NMPWAIT_WAIT_FOREVER: Literal[0xFFFFFFFF] if sys.version_info >= (3, 8): - MEM_COMMIT: Literal[4096] - MEM_FREE: Literal[65536] - MEM_IMAGE: 
Literal[16777216] - MEM_MAPPED: Literal[262144] - MEM_PRIVATE: Literal[131072] - MEM_RESERVE: Literal[8192] + MEM_COMMIT: Literal[0x1000] + MEM_FREE: Literal[0x10000] + MEM_IMAGE: Literal[0x1000000] + MEM_MAPPED: Literal[0x40000] + MEM_PRIVATE: Literal[0x20000] + MEM_RESERVE: Literal[0x2000] NULL: Literal[0] OPEN_EXISTING: Literal[3] @@ -72,37 +77,42 @@ if sys.platform == "win32": PIPE_TYPE_MESSAGE: Literal[4] PIPE_UNLIMITED_INSTANCES: Literal[255] PIPE_WAIT: Literal[0] + if sys.version_info >= (3, 8): - PAGE_EXECUTE: Literal[16] - PAGE_EXECUTE_READ: Literal[32] - PAGE_EXECUTE_READWRITE: Literal[64] - PAGE_EXECUTE_WRITECOPY: Literal[128] - PAGE_GUARD: Literal[256] - PAGE_NOACCESS: Literal[1] - PAGE_NOCACHE: Literal[512] - PAGE_READONLY: Literal[2] - PAGE_READWRITE: Literal[4] - PAGE_WRITECOMBINE: Literal[1024] - PAGE_WRITECOPY: Literal[8] - - PROCESS_ALL_ACCESS: Literal[2097151] - PROCESS_DUP_HANDLE: Literal[64] + PAGE_EXECUTE: Literal[0x10] + PAGE_EXECUTE_READ: Literal[0x20] + PAGE_EXECUTE_READWRITE: Literal[0x40] + PAGE_EXECUTE_WRITECOPY: Literal[0x80] + PAGE_GUARD: Literal[0x100] + PAGE_NOACCESS: Literal[0x1] + PAGE_NOCACHE: Literal[0x200] + PAGE_READONLY: Literal[0x2] + PAGE_READWRITE: Literal[0x4] + PAGE_WRITECOMBINE: Literal[0x400] + PAGE_WRITECOPY: Literal[0x8] + + PROCESS_ALL_ACCESS: Literal[0x1FFFFF] + PROCESS_DUP_HANDLE: Literal[0x40] + if sys.version_info >= (3, 8): - SEC_COMMIT: Literal[134217728] - SEC_IMAGE: Literal[16777216] - SEC_LARGE_PAGES: Literal[2147483648] - SEC_NOCACHE: Literal[268435456] - SEC_RESERVE: Literal[67108864] - SEC_WRITECOMBINE: Literal[1073741824] - STARTF_USESHOWWINDOW: Literal[1] - STARTF_USESTDHANDLES: Literal[256] - STD_ERROR_HANDLE: Literal[4294967284] - STD_INPUT_HANDLE: Literal[4294967286] - STD_OUTPUT_HANDLE: Literal[4294967285] + SEC_COMMIT: Literal[0x8000000] + SEC_IMAGE: Literal[0x1000000] + SEC_LARGE_PAGES: Literal[0x80000000] + SEC_NOCACHE: Literal[0x10000000] + SEC_RESERVE: Literal[0x4000000] + SEC_WRITECOMBINE: Literal[0x40000000] + + STARTF_USESHOWWINDOW: Literal[0x1] + STARTF_USESTDHANDLES: Literal[0x100] + + STD_ERROR_HANDLE: Literal[0xFFFFFFF4] + STD_OUTPUT_HANDLE: Literal[0xFFFFFFF5] + STD_INPUT_HANDLE: Literal[0xFFFFFFF6] + STILL_ACTIVE: Literal[259] SW_HIDE: Literal[0] if sys.version_info >= (3, 8): - SYNCHRONIZE: Literal[1048576] + SYNCHRONIZE: Literal[0x100000] WAIT_ABANDONED_0: Literal[128] WAIT_OBJECT_0: Literal[0] WAIT_TIMEOUT: Literal[258] @@ -196,7 +206,7 @@ if sys.platform == "win32": __named_pipe: int, __mode: int | None, __max_collection_count: int | None, __collect_data_timeout: int | None ) -> None: ... def TerminateProcess(__handle: int, __exit_code: int) -> None: ... - def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = 4294967295) -> int: ... + def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = 0xFFFFFFFF) -> int: ... def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... def WaitNamedPipe(__name: str, __timeout: int) -> None: ... 
@overload diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index 44a5b2289832..068dab4752be 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -1,5 +1,6 @@ +import _typeshed import sys -from _typeshed import Self, SupportsWrite +from _typeshed import SupportsWrite from collections.abc import Callable from typing import Any, Generic, TypeVar from typing_extensions import Literal @@ -13,10 +14,12 @@ class ABCMeta(type): __abstractmethods__: frozenset[str] if sys.version_info >= (3, 11): def __new__( - __mcls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwargs: Any - ) -> Self: ... + __mcls: type[_typeshed.Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwargs: Any + ) -> _typeshed.Self: ... else: - def __new__(mcls: type[Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any) -> Self: ... + def __new__( + mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any + ) -> _typeshed.Self: ... def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... diff --git a/mypy/typeshed/stdlib/aifc.pyi b/mypy/typeshed/stdlib/aifc.pyi index ad126d6cdbef..ab0c18ed6623 100644 --- a/mypy/typeshed/stdlib/aifc.pyi +++ b/mypy/typeshed/stdlib/aifc.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from types import TracebackType from typing import IO, Any, NamedTuple, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): __all__ = ["Error", "open"] @@ -24,7 +23,7 @@ _Marker: TypeAlias = tuple[int, int, bytes] class Aifc_read: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -48,7 +47,7 @@ class Aifc_read: class Aifc_write: def __init__(self, f: _File) -> None: ... def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi index 25c389c47e8e..827bbb97897f 100644 --- a/mypy/typeshed/stdlib/array.pyi +++ b/mypy/typeshed/stdlib/array.pyi @@ -1,10 +1,10 @@ import sys -from _typeshed import ReadableBuffer, Self, SupportsRead, SupportsWrite +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Iterable # pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y022 -from typing_extensions import Literal, SupportsIndex, TypeAlias +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias _IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] _FloatTypeCode: TypeAlias = Literal["f", "d"] @@ -72,8 +72,8 @@ class array(MutableSequence[_T], Generic[_T]): def __add__(self, __x: array[_T]) -> array[_T]: ... def __ge__(self, __other: array[_T]) -> bool: ... def __gt__(self, __other: array[_T]) -> bool: ... - def __iadd__(self: Self, __x: array[_T]) -> Self: ... 
# type: ignore[override] - def __imul__(self: Self, __n: int) -> Self: ... + def __iadd__(self, __x: array[_T]) -> Self: ... # type: ignore[override] + def __imul__(self, __n: int) -> Self: ... def __le__(self, __other: array[_T]) -> bool: ... def __lt__(self, __other: array[_T]) -> bool: ... def __mul__(self, __n: int) -> array[_T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index b2292801ee0d..f97afe873c9f 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -1,12 +1,12 @@ import ssl import sys -from _typeshed import FileDescriptorLike, ReadableBuffer, Self, StrPath, Unused, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer from abc import ABCMeta, abstractmethod from collections.abc import Awaitable, Callable, Coroutine, Generator, Sequence from contextvars import Context from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket from typing import IO, Any, Protocol, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias from .base_events import Server from .futures import Future @@ -95,7 +95,7 @@ class TimerHandle(Handle): class AbstractServer: @abstractmethod def close(self) -> None: ... - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__(self, *exc: Unused) -> None: ... @abstractmethod def get_loop(self) -> AbstractEventLoop: ... @@ -524,11 +524,11 @@ class AbstractEventLoop: stdin: int | IO[Any] | None = -1, stdout: int | IO[Any] | None = -1, stderr: int | IO[Any] | None = -1, - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, text: Literal[False, None] = ..., **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... @@ -541,11 +541,11 @@ class AbstractEventLoop: stdin: int | IO[Any] | None = -1, stdout: int | IO[Any] | None = -1, stderr: int | IO[Any] | None = -1, - universal_newlines: Literal[False] = ..., - shell: Literal[False] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... @abstractmethod diff --git a/mypy/typeshed/stdlib/asyncio/futures.pyi b/mypy/typeshed/stdlib/asyncio/futures.pyi index f325272d2403..79209f5ed4fb 100644 --- a/mypy/typeshed/stdlib/asyncio/futures.pyi +++ b/mypy/typeshed/stdlib/asyncio/futures.pyi @@ -1,9 +1,8 @@ import sys -from _typeshed import Self from collections.abc import Awaitable, Callable, Generator, Iterable from concurrent.futures._base import Error, Future as _ConcurrentFuture from typing import Any, TypeVar -from typing_extensions import Literal, TypeGuard +from typing_extensions import Literal, Self, TypeGuard from .events import AbstractEventLoop @@ -43,8 +42,8 @@ class Future(Awaitable[_T], Iterable[_T]): def __del__(self) -> None: ... def get_loop(self) -> AbstractEventLoop: ... @property - def _callbacks(self: Self) -> list[tuple[Callable[[Self], Any], Context]]: ... 
- def add_done_callback(self: Self, __fn: Callable[[Self], object], *, context: Context | None = None) -> None: ... + def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... + def add_done_callback(self, __fn: Callable[[Self], object], *, context: Context | None = None) -> None: ... if sys.version_info >= (3, 9): def cancel(self, msg: Any | None = None) -> bool: ... else: @@ -54,7 +53,7 @@ class Future(Awaitable[_T], Iterable[_T]): def done(self) -> bool: ... def result(self) -> _T: ... def exception(self) -> BaseException | None: ... - def remove_done_callback(self: Self, __fn: Callable[[Self], object]) -> int: ... + def remove_done_callback(self, __fn: Callable[[Self], object]) -> int: ... def set_result(self, __result: _T) -> None: ... def set_exception(self, __exception: type | BaseException) -> None: ... def __iter__(self) -> Generator[Any, None, _T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/locks.pyi b/mypy/typeshed/stdlib/asyncio/locks.pyi index 87bcaa2110db..ab4e63ab59b1 100644 --- a/mypy/typeshed/stdlib/asyncio/locks.pyi +++ b/mypy/typeshed/stdlib/asyncio/locks.pyi @@ -1,11 +1,11 @@ import enum import sys -from _typeshed import Self, Unused +from _typeshed import Unused from collections import deque from collections.abc import Callable, Generator from types import TracebackType from typing import Any, TypeVar -from typing_extensions import Literal +from typing_extensions import Literal, Self from .events import AbstractEventLoop from .futures import Future @@ -103,7 +103,7 @@ if sys.version_info >= (3, 11): class Barrier(_LoopBoundMixin): def __init__(self, parties: int) -> None: ... - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__(self, *args: Unused) -> None: ... async def wait(self) -> int: ... async def abort(self) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/runners.pyi b/mypy/typeshed/stdlib/asyncio/runners.pyi index 484f9eb831a1..847072b633ac 100644 --- a/mypy/typeshed/stdlib/asyncio/runners.pyi +++ b/mypy/typeshed/stdlib/asyncio/runners.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import Self, Unused +from _typeshed import Unused from collections.abc import Callable, Coroutine from contextvars import Context from typing import Any, TypeVar -from typing_extensions import final +from typing_extensions import Self, final from .events import AbstractEventLoop @@ -17,7 +17,7 @@ if sys.version_info >= (3, 11): @final class Runner: def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... def close(self) -> None: ... def get_loop(self) -> AbstractEventLoop: ... diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi index 2468f482291c..f30c57305d93 100644 --- a/mypy/typeshed/stdlib/asyncio/streams.pyi +++ b/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -1,9 +1,9 @@ import ssl import sys -from _typeshed import Self, StrPath +from _typeshed import StrPath from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence from typing import Any -from typing_extensions import SupportsIndex, TypeAlias +from typing_extensions import Self, SupportsIndex, TypeAlias from . 
import events, protocols, transports from .base_events import Server @@ -166,5 +166,5 @@ class StreamReader(AsyncIterator[bytes]): async def readuntil(self, separator: bytes | bytearray | memoryview = b"\n") -> bytes: ... async def read(self, n: int = -1) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... - def __aiter__(self: Self) -> Self: ... + def __aiter__(self) -> Self: ... async def __anext__(self) -> bytes: ... diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi index b112a9d80a32..10a414f24537 100644 --- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi @@ -49,11 +49,11 @@ if sys.version_info >= (3, 11): limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, text: Literal[False, None] = ..., # These parameters are taken by subprocess.Popen, which this ultimately delegates to executable: StrOrBytesPath | None = ..., @@ -81,11 +81,11 @@ if sys.version_info >= (3, 11): stderr: int | IO[Any] | None = None, limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, # These parameters are taken by subprocess.Popen, which this ultimately delegates to text: bool | None = ..., executable: StrOrBytesPath | None = ..., @@ -115,11 +115,11 @@ elif sys.version_info >= (3, 10): limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, text: Literal[False, None] = ..., # These parameters are taken by subprocess.Popen, which this ultimately delegates to executable: StrOrBytesPath | None = ..., @@ -146,11 +146,11 @@ elif sys.version_info >= (3, 10): stderr: int | IO[Any] | None = None, limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, # These parameters are taken by subprocess.Popen, which this ultimately delegates to text: bool | None = ..., executable: StrOrBytesPath | None = ..., @@ -180,11 +180,11 @@ else: # >= 3.9 limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: 
Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, text: Literal[False, None] = ..., # These parameters are taken by subprocess.Popen, which this ultimately delegates to executable: StrOrBytesPath | None = ..., @@ -211,11 +211,11 @@ else: # >= 3.9 loop: events.AbstractEventLoop | None = None, limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, # These parameters are taken by subprocess.Popen, which this ultimately delegates to text: bool | None = ..., executable: StrOrBytesPath | None = ..., diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi index 9e6c6e047368..8daa96f1ede0 100644 --- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi @@ -1,10 +1,10 @@ # This only exists in 3.11+. See VERSIONS. -from _typeshed import Self from collections.abc import Coroutine, Generator from contextvars import Context from types import TracebackType from typing import Any, TypeVar +from typing_extensions import Self from .tasks import Task @@ -13,7 +13,7 @@ __all__ = ["TaskGroup"] _T = TypeVar("_T") class TaskGroup: - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def create_task( self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None, context: Context | None = None diff --git a/mypy/typeshed/stdlib/asyncio/timeouts.pyi b/mypy/typeshed/stdlib/asyncio/timeouts.pyi index be516b5851d1..2d31b777b77d 100644 --- a/mypy/typeshed/stdlib/asyncio/timeouts.pyi +++ b/mypy/typeshed/stdlib/asyncio/timeouts.pyi @@ -1,6 +1,5 @@ -from _typeshed import Self from types import TracebackType -from typing_extensions import final +from typing_extensions import Self, final __all__ = ("Timeout", "timeout", "timeout_at") @@ -10,7 +9,7 @@ class Timeout: def when(self) -> float | None: ... def reschedule(self, when: float | None) -> None: ... def expired(self) -> bool: ... - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/unix_events.pyi b/mypy/typeshed/stdlib/asyncio/unix_events.pyi index 5e2b05f57ef1..e28d64b5287b 100644 --- a/mypy/typeshed/stdlib/asyncio/unix_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/unix_events.pyi @@ -1,10 +1,9 @@ import sys import types -from _typeshed import Self from abc import ABCMeta, abstractmethod from collections.abc import Callable from typing import Any -from typing_extensions import Literal +from typing_extensions import Literal, Self from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy from .selector_events import BaseSelectorEventLoop @@ -22,7 +21,7 @@ class AbstractChildWatcher: @abstractmethod def close(self) -> None: ... @abstractmethod - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... 
@abstractmethod def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... if sys.version_info >= (3, 8): @@ -64,13 +63,13 @@ if sys.platform != "win32": def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... class SafeChildWatcher(BaseChildWatcher): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... def remove_child_handler(self, pid: int) -> bool: ... class FastChildWatcher(BaseChildWatcher): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... def remove_child_handler(self, pid: int) -> bool: ... @@ -95,7 +94,7 @@ if sys.platform != "win32": class MultiLoopChildWatcher(AbstractChildWatcher): def is_active(self) -> bool: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -106,7 +105,7 @@ if sys.platform != "win32": class ThreadedChildWatcher(AbstractChildWatcher): def is_active(self) -> Literal[True]: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -117,7 +116,7 @@ if sys.platform != "win32": if sys.version_info >= (3, 9): class PidfdChildWatcher(AbstractChildWatcher): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi index 6ac4e0d89aa4..f3a82e2b8462 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi @@ -1,10 +1,9 @@ import subprocess import sys -from _typeshed import Self from collections.abc import Callable from types import TracebackType from typing import Any, AnyStr, Protocol -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.platform == "win32": __all__ = ("pipe", "Popen", "PIPE", "PipeHandle") @@ -25,7 +24,7 @@ if sys.platform == "win32": else: def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... @property def handle(self) -> int: ... @@ -41,7 +40,7 @@ if sys.platform == "win32": # subprocess.Popen takes other positional-or-keyword arguments before # stdin. 
def __new__( - cls: type[Self], + cls, args: subprocess._CMD, stdin: subprocess._FILE | None = ..., stdout: subprocess._FILE | None = ..., diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 9f45a937764b..a8bedc8374bd 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1,4 +1,5 @@ import _ast +import _typeshed import sys import types from _collections_abc import dict_items, dict_keys, dict_values @@ -11,7 +12,6 @@ from _typeshed import ( OpenBinaryModeWriting, OpenTextMode, ReadableBuffer, - Self, SupportsAdd, SupportsAiter, SupportsAnext, @@ -54,7 +54,7 @@ from typing import ( # noqa: Y022 overload, type_check_only, ) -from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Literal, LiteralString, Self, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -82,12 +82,12 @@ class object: __module__: str __annotations__: dict[str, Any] @property - def __class__(self: Self) -> type[Self]: ... + def __class__(self) -> type[Self]: ... # Ignore errors about type mismatch between property getter and setter @__class__.setter def __class__(self, __type: type[object]) -> None: ... # noqa: F811 def __init__(self) -> None: ... - def __new__(cls: type[Self]) -> Self: ... + def __new__(cls) -> Self: ... # N.B. `object.__setattr__` and `object.__delattr__` are heavily special-cased by type checkers. # Overriding them in subclasses has different semantics, even if the override has an identical signature. def __setattr__(self, __name: str, __value: Any) -> None: ... @@ -168,9 +168,11 @@ class type: @overload def __new__(cls, __o: object) -> type: ... @overload - def __new__(cls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwds: Any) -> Self: ... + def __new__( + cls: type[_typeshed.Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwds: Any + ) -> _typeshed.Self: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... - def __subclasses__(self: Self) -> list[Self]: ... + def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: ... # Note: the documentation doesn't specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> list[type]: ... @@ -196,9 +198,9 @@ _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 class int: @overload - def __new__(cls: type[Self], __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... @overload - def __new__(cls: type[Self], __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... @@ -221,7 +223,7 @@ class int: ) -> bytes: ... @classmethod def from_bytes( - cls: type[Self], + cls, bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, byteorder: Literal["little", "big"] = "big", *, @@ -231,7 +233,7 @@ class int: def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... 
@classmethod def from_bytes( - cls: type[Self], + cls, bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, byteorder: Literal["little", "big"], *, @@ -257,13 +259,13 @@ class int: @overload def __pow__(self, __x: Literal[0], __modulo: None) -> Literal[1]: ... @overload - def __pow__(self, __x: _PositiveInteger, __modulo: None = ...) -> int: ... + def __pow__(self, __x: _PositiveInteger, __modulo: None = None) -> int: ... @overload - def __pow__(self, __x: _NegativeInteger, __modulo: None = ...) -> float: ... + def __pow__(self, __x: _NegativeInteger, __modulo: None = None) -> float: ... # positive x -> int; negative x -> float # return type must be Any as `int | float` causes too many false-positive errors @overload - def __pow__(self, __x: int, __modulo: None = ...) -> Any: ... + def __pow__(self, __x: int, __modulo: None = None) -> Any: ... @overload def __pow__(self, __x: int, __modulo: int) -> int: ... def __rpow__(self, __x: int, __mod: int | None = None) -> Any: ... @@ -298,12 +300,12 @@ class int: def __index__(self) -> int: ... class float: - def __new__(cls: type[Self], __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... + def __new__(cls, __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... def as_integer_ratio(self) -> tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod - def fromhex(cls: type[Self], __s: str) -> Self: ... + def fromhex(cls, __s: str) -> Self: ... @property def real(self) -> float: ... @property @@ -330,7 +332,7 @@ class float: def __rmod__(self, __x: float) -> float: ... def __rdivmod__(self, __x: float) -> tuple[float, float]: ... @overload - def __rpow__(self, __x: _PositiveInteger, __modulo: None = ...) -> float: ... + def __rpow__(self, __x: _PositiveInteger, __modulo: None = None) -> float: ... @overload def __rpow__(self, __x: _NegativeInteger, __mod: None = None) -> complex: ... # Returning `complex` for the general case gives too many false-positive errors. @@ -364,19 +366,17 @@ class complex: # Python doesn't currently accept SupportsComplex for the second argument @overload def __new__( - cls: type[Self], + cls, real: complex | SupportsComplex | SupportsFloat | SupportsIndex = ..., imag: complex | SupportsFloat | SupportsIndex = ..., ) -> Self: ... @overload - def __new__(cls: type[Self], real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... + def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... else: @overload - def __new__( - cls: type[Self], real: complex | SupportsComplex | SupportsFloat = ..., imag: complex | SupportsFloat = ... - ) -> Self: ... + def __new__(cls, real: complex | SupportsComplex | SupportsFloat = ..., imag: complex | SupportsFloat = ...) -> Self: ... @overload - def __new__(cls: type[Self], real: str | SupportsComplex | SupportsFloat | complex) -> Self: ... + def __new__(cls, real: str | SupportsComplex | SupportsFloat | complex) -> Self: ... @property def real(self) -> float: ... @@ -410,11 +410,20 @@ class _TranslateTable(Protocol): class str(Sequence[str]): @overload - def __new__(cls: type[Self], object: object = ...) -> Self: ... + def __new__(cls, object: object = ...) -> Self: ... + @overload + def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... + @overload + def capitalize(self: LiteralString) -> LiteralString: ... 
@overload - def __new__(cls: type[Self], object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... def capitalize(self) -> str: ... # type: ignore[misc] + @overload + def casefold(self: LiteralString) -> LiteralString: ... + @overload def casefold(self) -> str: ... # type: ignore[misc] + @overload + def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @@ -422,11 +431,20 @@ class str(Sequence[str]): self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): + @overload + def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... + @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: + @overload + def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... + @overload def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -442,32 +460,91 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... + @overload + def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... + @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] + @overload + def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def lower(self: LiteralString) -> LiteralString: ... + @overload def lower(self) -> str: ... # type: ignore[misc] + @overload + def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def replace( + self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 + ) -> LiteralString: ... + @overload def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): + @overload + def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... + @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... + @overload def removesuffix(self, __suffix: str) -> str: ... 
# type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... + @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... + @overload + def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def swapcase(self: LiteralString) -> LiteralString: ... + @overload def swapcase(self) -> str: ... # type: ignore[misc] + @overload + def title(self: LiteralString) -> LiteralString: ... + @overload def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... + @overload + def upper(self: LiteralString) -> LiteralString: ... + @overload def upper(self) -> str: ... # type: ignore[misc] + @overload + def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... + @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload @@ -478,6 +555,9 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... + @overload + def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... + @overload def __add__(self, __s: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __o: str) -> bool: ... # type: ignore[override] @@ -485,23 +565,35 @@ class str(Sequence[str]): def __ge__(self, __x: str) -> bool: ... def __getitem__(self, __i: SupportsIndex | slice) -> str: ... def __gt__(self, __x: str) -> bool: ... + @overload + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __x: str) -> bool: ... 
+ @overload + def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... + @overload def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] + @overload + def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __x: object) -> bool: ... + @overload + def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... class bytes(ByteString): @overload - def __new__(cls: type[Self], __o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> Self: ... + def __new__(cls, __o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> Self: ... @overload - def __new__(cls: type[Self], __string: str, encoding: str, errors: str = ...) -> Self: ... + def __new__(cls, __string: str, encoding: str, errors: str = ...) -> Self: ... @overload - def __new__(cls: type[Self]) -> Self: ... + def __new__(cls) -> Self: ... def capitalize(self) -> bytes: ... def center(self, __width: SupportsIndex, __fillchar: bytes = b" ") -> bytes: ... def count( @@ -573,7 +665,7 @@ class bytes(ByteString): def upper(self) -> bytes: ... def zfill(self, __width: SupportsIndex) -> bytes: ... @classmethod - def fromhex(cls: type[Self], __s: str) -> Self: ... + def fromhex(cls, __s: str) -> Self: ... @staticmethod def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... def __len__(self) -> int: ... @@ -682,7 +774,7 @@ class bytearray(MutableSequence[int], ByteString): def upper(self) -> bytearray: ... def zfill(self, __width: SupportsIndex) -> bytearray: ... @classmethod - def fromhex(cls: type[Self], __string: str) -> Self: ... + def fromhex(cls, __string: str) -> Self: ... @staticmethod def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... def __len__(self) -> int: ... @@ -699,10 +791,10 @@ class bytearray(MutableSequence[int], ByteString): def __delitem__(self, __i: SupportsIndex | slice) -> None: ... def __add__(self, __s: ReadableBuffer) -> bytearray: ... # The superclass wants us to accept Iterable[int], but that fails at runtime. - def __iadd__(self: Self, __s: ReadableBuffer) -> Self: ... # type: ignore[override] + def __iadd__(self, __s: ReadableBuffer) -> Self: ... # type: ignore[override] def __mul__(self, __n: SupportsIndex) -> bytearray: ... def __rmul__(self, __n: SupportsIndex) -> bytearray: ... - def __imul__(self: Self, __n: SupportsIndex) -> Self: ... + def __imul__(self, __n: SupportsIndex) -> Self: ... def __mod__(self, __value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] @@ -741,7 +833,7 @@ class memoryview(Sequence[int]): @property def nbytes(self) -> int: ... def __init__(self, obj: ReadableBuffer) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None ) -> None: ... @@ -776,7 +868,7 @@ class memoryview(Sequence[int]): @final class bool(int): - def __new__(cls: type[Self], __o: object = ...) -> Self: ... + def __new__(cls, __o: object = ...) -> Self: ... 
# The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). @overload @@ -821,7 +913,7 @@ class slice: def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): - def __new__(cls: type[Self], __iterable: Iterable[_T_co] = ...) -> Self: ... + def __new__(cls, __iterable: Iterable[_T_co] = ...) -> Self: ... def __len__(self) -> int: ... def __contains__(self, __x: object) -> bool: ... @overload @@ -909,10 +1001,10 @@ class list(MutableSequence[_T], Generic[_T]): def __add__(self, __x: list[_T]) -> list[_T]: ... @overload def __add__(self, __x: list[_S]) -> list[_S | _T]: ... - def __iadd__(self: Self, __x: Iterable[_T]) -> Self: ... # type: ignore[misc] + def __iadd__(self, __x: Iterable[_T]) -> Self: ... # type: ignore[misc] def __mul__(self, __n: SupportsIndex) -> list[_T]: ... def __rmul__(self, __n: SupportsIndex) -> list[_T]: ... - def __imul__(self: Self, __n: SupportsIndex) -> Self: ... + def __imul__(self, __n: SupportsIndex) -> Self: ... def __contains__(self, __o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, __x: list[_T]) -> bool: ... @@ -941,7 +1033,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # Cannot be Iterable[Sequence[_T]] or otherwise dict(["foo", "bar", "baz"]) is not an error @overload def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ... - def __new__(cls: type[Self], *args: Any, **kwargs: Any) -> Self: ... + def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... def copy(self) -> dict[_KT, _VT]: ... def keys(self) -> dict_keys[_KT, _VT]: ... def values(self) -> dict_values[_KT, _VT]: ... @@ -978,9 +1070,9 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... # dict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... class set(MutableSet[_T], Generic[_T]): @overload @@ -1006,13 +1098,13 @@ class set(MutableSet[_T], Generic[_T]): def __contains__(self, __o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __and__(self, __s: AbstractSet[object]) -> set[_T]: ... - def __iand__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __iand__(self, __s: AbstractSet[object]) -> Self: ... def __or__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ior__(self: Self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __ior__(self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __sub__(self, __s: AbstractSet[_T | None]) -> set[_T]: ... - def __isub__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __isub__(self, __s: AbstractSet[object]) -> Self: ... def __xor__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ixor__(self: Self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __ixor__(self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __le__(self, __s: AbstractSet[object]) -> bool: ... def __lt__(self, __s: AbstractSet[object]) -> bool: ... 
def __ge__(self, __s: AbstractSet[object]) -> bool: ... @@ -1023,9 +1115,9 @@ class set(MutableSet[_T], Generic[_T]): class frozenset(AbstractSet[_T_co], Generic[_T_co]): @overload - def __new__(cls: type[Self]) -> Self: ... + def __new__(cls) -> Self: ... @overload - def __new__(cls: type[Self], __iterable: Iterable[_T_co]) -> Self: ... + def __new__(cls, __iterable: Iterable[_T_co]) -> Self: ... def copy(self) -> frozenset[_T_co]: ... def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... @@ -1050,7 +1142,7 @@ class frozenset(AbstractSet[_T_co], Generic[_T_co]): class enumerate(Iterator[tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[int, _T]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -1143,7 +1235,7 @@ if sys.version_info >= (3, 8): filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: Literal[0], - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, *, _feature_version: int = -1, @@ -1154,7 +1246,7 @@ if sys.version_info >= (3, 8): filename: str | ReadableBuffer | _PathLike[Any], mode: str, *, - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, _feature_version: int = -1, ) -> CodeType: ... @@ -1164,7 +1256,7 @@ if sys.version_info >= (3, 8): filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: Literal[1024], - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, *, _feature_version: int = -1, @@ -1175,7 +1267,7 @@ if sys.version_info >= (3, 8): filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: int, - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, *, _feature_version: int = -1, @@ -1188,7 +1280,7 @@ else: filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: Literal[0], - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, ) -> CodeType: ... @overload @@ -1197,7 +1289,7 @@ else: filename: str | ReadableBuffer | _PathLike[Any], mode: str, *, - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, ) -> CodeType: ... @overload @@ -1206,7 +1298,7 @@ else: filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: Literal[1024], - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, ) -> _ast.AST: ... @overload @@ -1215,7 +1307,7 @@ else: filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: int, - dont_inherit: int = False, + dont_inherit: bool = False, optimize: int = -1, ) -> Any: ... @@ -1262,7 +1354,7 @@ class filter(Iterator[_T], Generic[_T]): def __init__(self, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> None: ... @overload def __init__(self, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... def format(__value: object, __format_spec: str = "") -> str: ... @@ -1288,7 +1380,7 @@ def hash(__obj: object) -> int: ... def help(request: object = ...) -> None: ... def hex(__number: int | SupportsIndex) -> str: ... def id(__obj: object) -> int: ... -def input(__prompt: object = None) -> str: ... +def input(__prompt: object = "") -> str: ... 
class _GetItemIterable(Protocol[_T_co]): def __getitem__(self, __i: int) -> _T_co: ... @@ -1353,35 +1445,35 @@ class map(Iterator[_S], Generic[_S]): __iter6: Iterable[Any], *iterables: Iterable[Any], ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _S: ... @overload def max( - __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = None ) -> SupportsRichComparisonT: ... @overload def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None) -> SupportsRichComparisonT: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... @overload def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload def min( - __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = None ) -> SupportsRichComparisonT: ... @overload def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None) -> SupportsRichComparisonT: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... @overload def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload @@ -1503,7 +1595,7 @@ class _SupportsPow2(Protocol[_E, _T_co]): def __pow__(self, __other: _E) -> _T_co: ... class _SupportsPow3NoneOnly(Protocol[_E, _T_co]): - def __pow__(self, __other: _E, __modulo: None = ...) -> _T_co: ... + def __pow__(self, __other: _E, __modulo: None = None) -> _T_co: ... class _SupportsPow3(Protocol[_E, _M, _T_co]): def __pow__(self, __other: _E, __modulo: _M) -> _T_co: ... @@ -1590,7 +1682,7 @@ class reversed(Iterator[_T], Generic[_T]): def __init__(self, __sequence: Reversible[_T]) -> None: ... @overload def __init__(self, __sequence: SupportsLenAndGetItem[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... def __length_hint__(self) -> int: ... @@ -1634,11 +1726,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. 
if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... @@ -1734,7 +1826,7 @@ class zip(Iterator[_T_co], Generic[_T_co]): *iterables: Iterable[Any], ) -> zip[tuple[Any, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... # Signature of `builtins.__import__` should be kept identical to `importlib.__import__` @@ -1764,7 +1856,7 @@ class BaseException: __traceback__: TracebackType | None def __init__(self, *args: object) -> None: ... def __setstate__(self, __state: dict[str, Any] | None) -> None: ... - def with_traceback(self: Self, __tb: TracebackType | None) -> Self: ... + def with_traceback(self, __tb: TracebackType | None) -> Self: ... if sys.version_info >= (3, 11): # only present after add_note() is called __notes__: list[str] @@ -1917,7 +2009,7 @@ if sys.version_info >= (3, 11): # See `check_exception_group.py` for use-cases and comments. class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): - def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... + def __new__(cls, __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... def __init__(self, __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> None: ... @property def message(self) -> str: ... @@ -1933,7 +2025,7 @@ if sys.version_info >= (3, 11): ) -> BaseExceptionGroup[_BaseExceptionT] | None: ... @overload def subgroup( - self: Self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + self, __condition: Callable[[_BaseExceptionT_co | Self], bool] ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: ... @overload def split( @@ -1945,7 +2037,7 @@ if sys.version_info >= (3, 11): ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... @overload def split( - self: Self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + self, __condition: Callable[[_BaseExceptionT_co | Self], bool] ) -> tuple[BaseExceptionGroup[_BaseExceptionT_co] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... # In reality it is `NonEmptySequence`: @overload @@ -1955,7 +2047,7 @@ if sys.version_info >= (3, 11): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): - def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: ... + def __new__(cls, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: ... def __init__(self, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> None: ... @property def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... @@ -1965,14 +2057,12 @@ if sys.version_info >= (3, 11): self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] ) -> ExceptionGroup[_ExceptionT] | None: ... @overload - def subgroup( - self: Self, __condition: Callable[[_ExceptionT_co | Self], bool] - ) -> ExceptionGroup[_ExceptionT_co] | None: ... 
+ def subgroup(self, __condition: Callable[[_ExceptionT_co | Self], bool]) -> ExceptionGroup[_ExceptionT_co] | None: ... @overload # type: ignore[override] def split( self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] ) -> tuple[ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None]: ... @overload def split( - self: Self, __condition: Callable[[_ExceptionT_co | Self], bool] + self, __condition: Callable[[_ExceptionT_co | Self], bool] ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... diff --git a/mypy/typeshed/stdlib/bz2.pyi b/mypy/typeshed/stdlib/bz2.pyi index 8a7151d9e456..9ad80ee6f731 100644 --- a/mypy/typeshed/stdlib/bz2.pyi +++ b/mypy/typeshed/stdlib/bz2.pyi @@ -1,10 +1,10 @@ import _compression import sys from _compression import BaseStream -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Iterable from typing import IO, Any, Protocol, TextIO, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias, final +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, final __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"] @@ -92,7 +92,7 @@ def open( ) -> BZ2File | TextIO: ... class BZ2File(BaseStream, IO[bytes]): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... if sys.version_info >= (3, 9): @overload def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... diff --git a/mypy/typeshed/stdlib/cProfile.pyi b/mypy/typeshed/stdlib/cProfile.pyi index 77608b268f6f..8945b21427ab 100644 --- a/mypy/typeshed/stdlib/cProfile.pyi +++ b/mypy/typeshed/stdlib/cProfile.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import Self, StrOrBytesPath, Unused +from _typeshed import StrOrBytesPath, Unused from collections.abc import Callable from types import CodeType from typing import Any, TypeVar -from typing_extensions import ParamSpec, TypeAlias +from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] @@ -27,11 +27,11 @@ class Profile: def dump_stats(self, file: StrOrBytesPath) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... - def run(self: Self, cmd: str) -> Self: ... - def runctx(self: Self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... if sys.version_info >= (3, 8): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *exc_info: Unused) -> None: ... def label(code: str | CodeType) -> _Label: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/cgi.pyi b/mypy/typeshed/stdlib/cgi.pyi index 6f5637e3cce1..a2acfa92d463 100644 --- a/mypy/typeshed/stdlib/cgi.pyi +++ b/mypy/typeshed/stdlib/cgi.pyi @@ -1,10 +1,11 @@ import sys -from _typeshed import Self, SupportsGetItem, SupportsItemAccess, Unused +from _typeshed import SupportsGetItem, SupportsItemAccess, Unused from builtins import list as _list, type as _type from collections.abc import Iterable, Iterator, Mapping from email.message import Message from types import TracebackType from typing import IO, Any, Protocol +from typing_extensions import Self __all__ = [ "MiniFieldStorage", @@ -105,7 +106,7 @@ class FieldStorage: max_num_fields: int | None = None, separator: str = "&", ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def __iter__(self) -> Iterator[str]: ... def __getitem__(self, key: str) -> Any: ... diff --git a/mypy/typeshed/stdlib/cgitb.pyi b/mypy/typeshed/stdlib/cgitb.pyi index 04bcbfb0d13d..4c315bf6ca39 100644 --- a/mypy/typeshed/stdlib/cgitb.pyi +++ b/mypy/typeshed/stdlib/cgitb.pyi @@ -2,8 +2,9 @@ from _typeshed import OptExcInfo, StrOrBytesPath from collections.abc import Callable from types import FrameType, TracebackType from typing import IO, Any +from typing_extensions import Final -__UNDEF__: object # undocumented sentinel +__UNDEF__: Final[object] # undocumented sentinel def reset() -> str: ... # undocumented def small(text: str) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index 33d0e6709923..5a22853b6aee 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -1,11 +1,11 @@ import sys import types from _codecs import * -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer from abc import abstractmethod from collections.abc import Callable, Generator, Iterable from typing import Any, BinaryIO, Protocol, TextIO -from typing_extensions import Literal +from typing_extensions import Literal, Self __all__ = [ "register", @@ -110,7 +110,7 @@ class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): def incrementaldecoder(self) -> _IncrementalDecoder: ... name: str def __new__( - cls: type[Self], + cls, encode: _Encoder, decode: _Decoder, streamreader: _StreamReader | None = None, @@ -210,7 +210,7 @@ class StreamWriter(Codec): def write(self, object: str) -> None: ... def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... @@ -222,9 +222,9 @@ class StreamReader(Codec): def readline(self, size: int | None = None, keepends: bool = True) -> str: ... def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: ... def reset(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> str: ... def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... 
@@ -237,12 +237,12 @@ class StreamReaderWriter(TextIO): def readline(self, size: int | None = None) -> str: ... def readlines(self, sizehint: int | None = None) -> list[str]: ... def __next__(self) -> str: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def write(self, data: str) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... # These methods don't actually exist directly, but they are needed to satisfy the TextIO @@ -271,12 +271,12 @@ class StreamRecoder(BinaryIO): def readline(self, size: int | None = None) -> bytes: ... def readlines(self, sizehint: int | None = None) -> list[bytes]: ... def __next__(self) -> bytes: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def write(self, data: bytes) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[bytes]) -> None: ... def reset(self) -> None: ... def __getattr__(self, name: str) -> Any: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index d4c537b1384e..893a289d3cb1 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -1,8 +1,8 @@ import sys from _collections_abc import dict_items, dict_keys, dict_values -from _typeshed import Self, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT from typing import Any, Generic, NoReturn, TypeVar, overload -from typing_extensions import SupportsIndex, final +from typing_extensions import Self, SupportsIndex, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -68,8 +68,8 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __delitem__(self, key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __contains__(self, key: object) -> bool: ... - def copy(self: Self) -> Self: ... - def __copy__(self: Self) -> Self: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... # `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`. # TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system. @@ -85,9 +85,9 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... 
@overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class UserList(MutableSequence[_T]): data: list[_T] @@ -105,32 +105,32 @@ class UserList(MutableSequence[_T]): @overload def __getitem__(self, i: SupportsIndex) -> _T: ... @overload - def __getitem__(self: Self, i: slice) -> Self: ... + def __getitem__(self, i: slice) -> Self: ... @overload def __setitem__(self, i: SupportsIndex, item: _T) -> None: ... @overload def __setitem__(self, i: slice, item: Iterable[_T]) -> None: ... def __delitem__(self, i: SupportsIndex | slice) -> None: ... - def __add__(self: Self, other: Iterable[_T]) -> Self: ... - def __radd__(self: Self, other: Iterable[_T]) -> Self: ... - def __iadd__(self: Self, other: Iterable[_T]) -> Self: ... - def __mul__(self: Self, n: int) -> Self: ... - def __rmul__(self: Self, n: int) -> Self: ... - def __imul__(self: Self, n: int) -> Self: ... + def __add__(self, other: Iterable[_T]) -> Self: ... + def __radd__(self, other: Iterable[_T]) -> Self: ... + def __iadd__(self, other: Iterable[_T]) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... + def __imul__(self, n: int) -> Self: ... def append(self, item: _T) -> None: ... def insert(self, i: int, item: _T) -> None: ... def pop(self, i: int = -1) -> _T: ... def remove(self, item: _T) -> None: ... - def copy(self: Self) -> Self: ... - def __copy__(self: Self) -> Self: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... def count(self, item: _T) -> int: ... # All arguments are passed to `list.index` at runtime, so the signature should be kept in line with `list.index`. - def index(self, item: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def index(self, item: _T, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... # All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`. @overload - def sort(self: UserList[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + def sort(self: UserList[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... @overload - def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... def extend(self, other: Iterable[_T]) -> None: ... class UserString(Sequence[UserString]): @@ -147,30 +147,30 @@ class UserString(Sequence[UserString]): def __eq__(self, string: object) -> bool: ... def __contains__(self, char: object) -> bool: ... def __len__(self) -> int: ... - def __getitem__(self: Self, index: SupportsIndex | slice) -> Self: ... - def __iter__(self: Self) -> Iterator[Self]: ... - def __reversed__(self: Self) -> Iterator[Self]: ... - def __add__(self: Self, other: object) -> Self: ... - def __radd__(self: Self, other: object) -> Self: ... - def __mul__(self: Self, n: int) -> Self: ... - def __rmul__(self: Self, n: int) -> Self: ... - def __mod__(self: Self, args: Any) -> Self: ... + def __getitem__(self, index: SupportsIndex | slice) -> Self: ... + def __iter__(self) -> Iterator[Self]: ... + def __reversed__(self) -> Iterator[Self]: ... + def __add__(self, other: object) -> Self: ... + def __radd__(self, other: object) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... 
+ def __mod__(self, args: Any) -> Self: ... if sys.version_info >= (3, 8): - def __rmod__(self: Self, template: object) -> Self: ... + def __rmod__(self, template: object) -> Self: ... else: - def __rmod__(self: Self, format: Any) -> Self: ... + def __rmod__(self, format: Any) -> Self: ... - def capitalize(self: Self) -> Self: ... - def casefold(self: Self) -> Self: ... - def center(self: Self, width: int, *args: Any) -> Self: ... + def capitalize(self) -> Self: ... + def casefold(self) -> Self: ... + def center(self, width: int, *args: Any) -> Self: ... def count(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... if sys.version_info >= (3, 8): def encode(self: UserString, encoding: str | None = "utf-8", errors: str | None = "strict") -> bytes: ... else: - def encode(self: Self, encoding: str | None = None, errors: str | None = None) -> Self: ... + def encode(self, encoding: str | None = None, errors: str | None = None) -> Self: ... def endswith(self, suffix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... - def expandtabs(self: Self, tabsize: int = 8) -> Self: ... + def expandtabs(self, tabsize: int = 8) -> Self: ... def find(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... def format(self, *args: Any, **kwds: Any) -> str: ... def format_map(self, mapping: Mapping[str, Any]) -> str: ... @@ -188,63 +188,63 @@ class UserString(Sequence[UserString]): def isupper(self) -> bool: ... def isascii(self) -> bool: ... def join(self, seq: Iterable[str]) -> str: ... - def ljust(self: Self, width: int, *args: Any) -> Self: ... - def lower(self: Self) -> Self: ... - def lstrip(self: Self, chars: str | None = None) -> Self: ... + def ljust(self, width: int, *args: Any) -> Self: ... + def lower(self) -> Self: ... + def lstrip(self, chars: str | None = None) -> Self: ... maketrans = str.maketrans def partition(self, sep: str) -> tuple[str, str, str]: ... if sys.version_info >= (3, 9): - def removeprefix(self: Self, __prefix: str | UserString) -> Self: ... - def removesuffix(self: Self, __suffix: str | UserString) -> Self: ... + def removeprefix(self, __prefix: str | UserString) -> Self: ... + def removesuffix(self, __suffix: str | UserString) -> Self: ... - def replace(self: Self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ... + def replace(self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ... def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... - def rjust(self: Self, width: int, *args: Any) -> Self: ... + def rjust(self, width: int, *args: Any) -> Self: ... def rpartition(self, sep: str) -> tuple[str, str, str]: ... - def rstrip(self: Self, chars: str | None = None) -> Self: ... + def rstrip(self, chars: str | None = None) -> Self: ... def split(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... def rsplit(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... def splitlines(self, keepends: bool = False) -> list[str]: ... def startswith(self, prefix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... - def strip(self: Self, chars: str | None = None) -> Self: ... - def swapcase(self: Self) -> Self: ... - def title(self: Self) -> Self: ... - def translate(self: Self, *args: Any) -> Self: ... - def upper(self: Self) -> Self: ... 
- def zfill(self: Self, width: int) -> Self: ... + def strip(self, chars: str | None = None) -> Self: ... + def swapcase(self) -> Self: ... + def title(self) -> Self: ... + def translate(self, *args: Any) -> Self: ... + def upper(self) -> Self: ... + def zfill(self, width: int) -> Self: ... class deque(MutableSequence[_T], Generic[_T]): @property def maxlen(self) -> int | None: ... @overload - def __init__(self, *, maxlen: int | None = ...) -> None: ... + def __init__(self, *, maxlen: int | None = None) -> None: ... @overload - def __init__(self, iterable: Iterable[_T], maxlen: int | None = ...) -> None: ... + def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ... def append(self, __x: _T) -> None: ... def appendleft(self, __x: _T) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def count(self, __x: _T) -> int: ... def extend(self, __iterable: Iterable[_T]) -> None: ... def extendleft(self, __iterable: Iterable[_T]) -> None: ... def insert(self, __i: int, __x: _T) -> None: ... - def index(self, __x: _T, __start: int = ..., __stop: int = ...) -> int: ... + def index(self, __x: _T, __start: int = 0, __stop: int = ...) -> int: ... def pop(self) -> _T: ... # type: ignore[override] def popleft(self) -> _T: ... def remove(self, __value: _T) -> None: ... - def rotate(self, __n: int = ...) -> None: ... - def __copy__(self: Self) -> Self: ... + def rotate(self, __n: int = 1) -> None: ... + def __copy__(self) -> Self: ... def __len__(self) -> int: ... # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores def __getitem__(self, __index: SupportsIndex) -> _T: ... # type: ignore[override] def __setitem__(self, __i: SupportsIndex, __x: _T) -> None: ... # type: ignore[override] def __delitem__(self, __i: SupportsIndex) -> None: ... # type: ignore[override] def __contains__(self, __o: object) -> bool: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... - def __iadd__(self: Self, __iterable: Iterable[_T]) -> Self: ... - def __add__(self: Self, __other: Self) -> Self: ... - def __mul__(self: Self, __other: int) -> Self: ... - def __imul__(self: Self, __other: int) -> Self: ... + def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... + def __iadd__(self, __iterable: Iterable[_T]) -> Self: ... + def __add__(self, __other: Self) -> Self: ... + def __mul__(self, __other: int) -> Self: ... + def __imul__(self, __other: int) -> Self: ... def __lt__(self, __other: deque[_T]) -> bool: ... def __le__(self, __other: deque[_T]) -> bool: ... def __gt__(self, __other: deque[_T]) -> bool: ... @@ -261,7 +261,7 @@ class Counter(dict[_T, int], Generic[_T]): def __init__(self, __mapping: SupportsKeysAndGetItem[_T, int]) -> None: ... @overload def __init__(self, __iterable: Iterable[_T]) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def elements(self) -> Iterator[_T]: ... def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ... @classmethod @@ -281,9 +281,9 @@ class Counter(dict[_T, int], Generic[_T]): @overload # type: ignore[override] def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... @overload - def update(self, __m: Iterable[_T], **kwargs: int) -> None: ... + def update(self, __iterable: Iterable[_T], **kwargs: int) -> None: ... @overload - def update(self, __m: None = ..., **kwargs: int) -> None: ... + def update(self, __iterable: None = None, **kwargs: int) -> None: ... 
def __missing__(self, key: _T) -> int: ... def __delitem__(self, elem: object) -> None: ... if sys.version_info >= (3, 10): @@ -297,10 +297,10 @@ class Counter(dict[_T, int], Generic[_T]): def __pos__(self) -> Counter[_T]: ... def __neg__(self) -> Counter[_T]: ... # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. - def __iadd__(self: Self, other: Counter[_T]) -> Self: ... # type: ignore[misc] - def __isub__(self: Self, other: Counter[_T]) -> Self: ... - def __iand__(self: Self, other: Counter[_T]) -> Self: ... - def __ior__(self: Self, other: Counter[_T]) -> Self: ... # type: ignore[override,misc] + def __iadd__(self, other: Counter[_T]) -> Self: ... # type: ignore[misc] + def __isub__(self, other: Counter[_T]) -> Self: ... + def __iand__(self, other: Counter[_T]) -> Self: ... + def __ior__(self, other: Counter[_T]) -> Self: ... # type: ignore[override,misc] if sys.version_info >= (3, 10): def total(self) -> int: ... def __le__(self, other: Counter[Any]) -> bool: ... @@ -338,7 +338,7 @@ class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... def move_to_end(self, key: _KT, last: bool = True) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def __reversed__(self) -> Iterator[_KT]: ... def keys(self) -> _odict_keys[_KT, _VT]: ... def items(self) -> _odict_items[_KT, _VT]: ... @@ -387,15 +387,15 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): **kwargs: _VT, ) -> None: ... def __missing__(self, __key: _KT) -> _VT: ... - def __copy__(self: Self) -> Self: ... - def copy(self: Self) -> Self: ... + def __copy__(self) -> Self: ... + def copy(self) -> Self: ... class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): maps: list[MutableMapping[_KT, _VT]] def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... - def new_child(self: Self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... + def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... @property - def parents(self: Self) -> Self: ... + def parents(self) -> Self: ... def __setitem__(self, key: _KT, value: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __getitem__(self, key: _KT) -> _VT: ... @@ -412,13 +412,13 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): @overload def pop(self, key: _KT) -> _VT: ... @overload - def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... - def copy(self: Self) -> Self: ... + def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ... + def copy(self) -> Self: ... __copy__ = copy # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], __value: None = ...) -> ChainMap[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], __value: None = None) -> ChainMap[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> ChainMap[_T, _S]: ... @@ -427,6 +427,6 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... 
+ def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... diff --git a/mypy/typeshed/stdlib/concurrent/futures/_base.pyi b/mypy/typeshed/stdlib/concurrent/futures/_base.pyi index 64084a884433..e792cf1a83c0 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/_base.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/_base.pyi @@ -1,11 +1,11 @@ import sys import threading -from _typeshed import Self, Unused +from _typeshed import Unused from collections.abc import Callable, Iterable, Iterator, Sequence from logging import Logger from types import TracebackType from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal, ParamSpec, SupportsIndex +from typing_extensions import Literal, ParamSpec, Self, SupportsIndex if sys.version_info >= (3, 9): from types import GenericAlias @@ -62,7 +62,7 @@ class Executor: else: def shutdown(self, wait: bool = True) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi index 522285abbc72..feb43aabb039 100644 --- a/mypy/typeshed/stdlib/contextlib.pyi +++ b/mypy/typeshed/stdlib/contextlib.pyi @@ -1,11 +1,11 @@ import abc import sys -from _typeshed import FileDescriptorOrPath, Self, Unused +from _typeshed import FileDescriptorOrPath, Unused from abc import abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType from typing import IO, Any, Generic, Protocol, TypeVar, overload, runtime_checkable -from typing_extensions import ParamSpec, TypeAlias +from typing_extensions import ParamSpec, Self, TypeAlias __all__ = [ "contextmanager", @@ -140,9 +140,9 @@ class ExitStack(metaclass=abc.ABCMeta): def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... - def pop_all(self: Self) -> Self: ... + def pop_all(self) -> Self: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None ) -> bool: ... @@ -163,9 +163,9 @@ class AsyncExitStack(metaclass=abc.ABCMeta): def push_async_callback( self, __callback: Callable[_P, Awaitable[_T]], *args: _P.args, **kwds: _P.kwargs ) -> Callable[_P, Awaitable[_T]]: ... - def pop_all(self: Self) -> Self: ... + def pop_all(self) -> Self: ... async def aclose(self) -> None: ... - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__( self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None ) -> bool: ... 
diff --git a/mypy/typeshed/stdlib/copyreg.pyi b/mypy/typeshed/stdlib/copyreg.pyi index 07338b422385..8f7fd957fc52 100644 --- a/mypy/typeshed/stdlib/copyreg.pyi +++ b/mypy/typeshed/stdlib/copyreg.pyi @@ -1,9 +1,9 @@ from collections.abc import Callable, Hashable -from typing import Any, SupportsInt, TypeVar, Union +from typing import Any, SupportsInt, TypeVar from typing_extensions import TypeAlias _T = TypeVar("_T") -_Reduce: TypeAlias = Union[tuple[Callable[..., _T], tuple[Any, ...]], tuple[Callable[..., _T], tuple[Any, ...], Any | None]] +_Reduce: TypeAlias = tuple[Callable[..., _T], tuple[Any, ...]] | tuple[Callable[..., _T], tuple[Any, ...], Any | None] __all__ = ["pickle", "constructor", "add_extension", "remove_extension", "clear_extension_cache"] diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi index 13b483b219d5..234b189fb3db 100644 --- a/mypy/typeshed/stdlib/csv.pyi +++ b/mypy/typeshed/stdlib/csv.pyi @@ -21,10 +21,10 @@ from _csv import ( unregister_dialect as unregister_dialect, writer as writer, ) -from _typeshed import Self, SupportsWrite +from _typeshed import SupportsWrite from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.version_info >= (3, 8): from builtins import dict as _DictReadMapping @@ -107,7 +107,7 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): quoting: _QuotingType = ..., strict: bool = ..., ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _DictReadMapping[_T | Any, str | Any]: ... if sys.version_info >= (3, 12): def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 5c4299989d92..aaaacf287903 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -1,10 +1,10 @@ import sys from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL -from _typeshed import ReadableBuffer, Self, WriteableBuffer +from _typeshed import ReadableBuffer, WriteableBuffer from abc import abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence -from typing import Any, ClassVar, Generic, TypeVar, Union as _UnionT, overload -from typing_extensions import TypeAlias +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -77,21 +77,21 @@ class _CData(metaclass=_CDataMeta): _b_needsfree_: bool _objects: Mapping[Any, int] | None @classmethod - def from_buffer(cls: type[Self], source: WriteableBuffer, offset: int = ...) -> Self: ... + def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ... @classmethod - def from_buffer_copy(cls: type[Self], source: ReadableBuffer, offset: int = ...) -> Self: ... + def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ... @classmethod - def from_address(cls: type[Self], address: int) -> Self: ... + def from_address(cls, address: int) -> Self: ... @classmethod - def from_param(cls: type[Self], obj: Any) -> Self | _CArgObject: ... + def from_param(cls, obj: Any) -> Self | _CArgObject: ... @classmethod - def in_dll(cls: type[Self], library: CDLL, name: str) -> Self: ... 
+ def in_dll(cls, library: CDLL, name: str) -> Self: ... class _CanCastTo(_CData): ... class _PointerLike(_CanCastTo): ... _ECT: TypeAlias = Callable[[type[_CData] | None, _FuncPointer, tuple[_CData, ...]], _CData] -_PF: TypeAlias = _UnionT[tuple[int], tuple[int, str], tuple[int, str, Any]] +_PF: TypeAlias = tuple[int] | tuple[int, str] | tuple[int, str, Any] class _FuncPointer(_PointerLike, _CData): restype: type[_CData] | Callable[[int], Any] | None @@ -271,7 +271,11 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - raw: bytes # Note: only available if _CT == c_char + # Note: only available if _CT == c_char + @property + def raw(self) -> bytes: ... + @raw.setter + def raw(self, value: ReadableBuffer) -> None: ... value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT diff --git a/mypy/typeshed/stdlib/dataclasses.pyi b/mypy/typeshed/stdlib/dataclasses.pyi index 3b7327137ec5..c02aaabe6196 100644 --- a/mypy/typeshed/stdlib/dataclasses.pyi +++ b/mypy/typeshed/stdlib/dataclasses.pyi @@ -1,9 +1,10 @@ import enum import sys import types +from _typeshed import DataclassInstance from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Mapping -from typing import Any, ClassVar, Generic, Protocol, TypeVar, overload +from typing import Any, Generic, Protocol, TypeVar, overload from typing_extensions import Literal, TypeAlias, TypeGuard if sys.version_info >= (3, 9): @@ -30,10 +31,7 @@ __all__ = [ if sys.version_info >= (3, 10): __all__ += ["KW_ONLY"] -class _DataclassInstance(Protocol): - __dataclass_fields__: ClassVar[dict[str, Field[Any]]] - -_DataclassT = TypeVar("_DataclassT", bound=_DataclassInstance) +_DataclassT = TypeVar("_DataclassT", bound=DataclassInstance) # define _MISSING_TYPE as an enum within the type stubs, # even though that is not really its type at runtime @@ -49,26 +47,26 @@ if sys.version_info >= (3, 10): class KW_ONLY: ... @overload -def asdict(obj: _DataclassInstance) -> dict[str, Any]: ... +def asdict(obj: DataclassInstance) -> dict[str, Any]: ... @overload -def asdict(obj: _DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... +def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... @overload -def astuple(obj: _DataclassInstance) -> tuple[Any, ...]: ... +def astuple(obj: DataclassInstance) -> tuple[Any, ...]: ... @overload -def astuple(obj: _DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... +def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... if sys.version_info >= (3, 8): # cls argument is now positional-only @overload - def dataclass(__cls: type[_T]) -> type[_T]: ... - @overload def dataclass(__cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(__cls: type[_T]) -> type[_T]: ... else: - @overload - def dataclass(_cls: type[_T]) -> type[_T]: ... @overload def dataclass(_cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(_cls: type[_T]) -> type[_T]: ... if sys.version_info >= (3, 11): @overload @@ -223,13 +221,13 @@ else: metadata: Mapping[Any, Any] | None = None, ) -> Any: ... 
-def fields(class_or_instance: _DataclassInstance | type[_DataclassInstance]) -> tuple[Field[Any], ...]: ... +def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... @overload -def is_dataclass(obj: _DataclassInstance | type[_DataclassInstance]) -> Literal[True]: ... +def is_dataclass(obj: DataclassInstance | type[DataclassInstance]) -> Literal[True]: ... @overload -def is_dataclass(obj: type) -> TypeGuard[type[_DataclassInstance]]: ... +def is_dataclass(obj: type) -> TypeGuard[type[DataclassInstance]]: ... @overload -def is_dataclass(obj: object) -> TypeGuard[_DataclassInstance | type[_DataclassInstance]]: ... +def is_dataclass(obj: object) -> TypeGuard[DataclassInstance | type[DataclassInstance]]: ... class FrozenInstanceError(AttributeError): ... diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index 377ef0067485..4da5501ce76d 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -1,9 +1,8 @@ import sys -from _typeshed import Self from abc import abstractmethod from time import struct_time from typing import ClassVar, NamedTuple, NoReturn, TypeVar, overload -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, final if sys.version_info >= (3, 11): __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR", "UTC") @@ -50,18 +49,18 @@ class date: min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] - def __new__(cls: type[Self], year: int, month: int, day: int) -> Self: ... + def __new__(cls, year: int, month: int, day: int) -> Self: ... @classmethod - def fromtimestamp(cls: type[Self], __timestamp: float) -> Self: ... + def fromtimestamp(cls, __timestamp: float) -> Self: ... @classmethod - def today(cls: type[Self]) -> Self: ... + def today(cls) -> Self: ... @classmethod - def fromordinal(cls: type[Self], __n: int) -> Self: ... + def fromordinal(cls, __n: int) -> Self: ... @classmethod - def fromisoformat(cls: type[Self], __date_string: str) -> Self: ... + def fromisoformat(cls, __date_string: str) -> Self: ... if sys.version_info >= (3, 8): @classmethod - def fromisocalendar(cls: type[Self], year: int, week: int, day: int) -> Self: ... + def fromisocalendar(cls, year: int, week: int, day: int) -> Self: ... @property def year(self) -> int: ... @@ -82,16 +81,16 @@ class date: def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... def toordinal(self) -> int: ... - def replace(self: Self, year: int = ..., month: int = ..., day: int = ...) -> Self: ... + def replace(self, year: int = ..., month: int = ..., day: int = ...) -> Self: ... def __le__(self, __other: date) -> bool: ... def __lt__(self, __other: date) -> bool: ... def __ge__(self, __other: date) -> bool: ... def __gt__(self, __other: date) -> bool: ... if sys.version_info >= (3, 8): - def __add__(self: Self, __other: timedelta) -> Self: ... - def __radd__(self: Self, __other: timedelta) -> Self: ... + def __add__(self, __other: timedelta) -> Self: ... + def __radd__(self, __other: timedelta) -> Self: ... @overload - def __sub__(self: Self, __other: timedelta) -> Self: ... + def __sub__(self, __other: timedelta) -> Self: ... @overload def __sub__(self, __other: datetime) -> NoReturn: ... 
@overload @@ -119,7 +118,7 @@ class time: max: ClassVar[time] resolution: ClassVar[timedelta] def __new__( - cls: type[Self], + cls, hour: int = ..., minute: int = ..., second: int = ..., @@ -146,7 +145,7 @@ class time: def __gt__(self, __other: time) -> bool: ... def isoformat(self, timespec: str = ...) -> str: ... @classmethod - def fromisoformat(cls: type[Self], __time_string: str) -> Self: ... + def fromisoformat(cls, __time_string: str) -> Self: ... # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ @@ -160,7 +159,7 @@ class time: def tzname(self) -> str | None: ... def dst(self) -> timedelta | None: ... def replace( - self: Self, + self, hour: int = ..., minute: int = ..., second: int = ..., @@ -178,7 +177,7 @@ class timedelta: max: ClassVar[timedelta] resolution: ClassVar[timedelta] def __new__( - cls: type[Self], + cls, days: float = ..., seconds: float = ..., microseconds: float = ..., @@ -223,7 +222,7 @@ class datetime(date): min: ClassVar[datetime] max: ClassVar[datetime] def __new__( - cls: type[Self], + cls, year: int, month: int, day: int, @@ -252,35 +251,35 @@ class datetime(date): # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): @classmethod - def fromtimestamp(cls: type[Self], timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = ...) -> Self: ... else: @classmethod - def fromtimestamp(cls: type[Self], __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + def fromtimestamp(cls, __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... @classmethod - def utcfromtimestamp(cls: type[Self], __t: float) -> Self: ... + def utcfromtimestamp(cls, __t: float) -> Self: ... if sys.version_info >= (3, 8): @classmethod - def now(cls: type[Self], tz: _TzInfo | None = None) -> Self: ... + def now(cls, tz: _TzInfo | None = None) -> Self: ... else: @overload @classmethod - def now(cls: type[Self], tz: None = None) -> Self: ... + def now(cls, tz: None = None) -> Self: ... @overload @classmethod def now(cls, tz: _TzInfo) -> datetime: ... @classmethod - def utcnow(cls: type[Self]) -> Self: ... + def utcnow(cls) -> Self: ... @classmethod - def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> datetime: ... + def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ... def timestamp(self) -> float: ... def utctimetuple(self) -> struct_time: ... def date(self) -> _Date: ... def time(self) -> _Time: ... def timetz(self) -> _Time: ... def replace( - self: Self, + self, year: int = ..., month: int = ..., day: int = ..., @@ -293,13 +292,13 @@ class datetime(date): fold: int = ..., ) -> Self: ... if sys.version_info >= (3, 8): - def astimezone(self: Self, tz: _TzInfo | None = ...) -> Self: ... + def astimezone(self, tz: _TzInfo | None = ...) -> Self: ... else: def astimezone(self, tz: _TzInfo | None = ...) -> datetime: ... def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ... @classmethod - def strptime(cls, __date_string: str, __format: str) -> datetime: ... + def strptime(cls, __date_string: str, __format: str) -> Self: ... def utcoffset(self) -> timedelta | None: ... def tzname(self) -> str | None: ... def dst(self) -> timedelta | None: ... @@ -309,7 +308,7 @@ class datetime(date): def __gt__(self, __other: datetime) -> bool: ... 
# type: ignore[override] if sys.version_info >= (3, 8): @overload # type: ignore[override] - def __sub__(self: Self, __other: timedelta) -> Self: ... + def __sub__(self, __other: timedelta) -> Self: ... @overload def __sub__(self: _D, __other: _D) -> timedelta: ... else: diff --git a/mypy/typeshed/stdlib/dbm/__init__.pyi b/mypy/typeshed/stdlib/dbm/__init__.pyi index ab224086b7be..0068d67b6ad1 100644 --- a/mypy/typeshed/stdlib/dbm/__init__.pyi +++ b/mypy/typeshed/stdlib/dbm/__init__.pyi @@ -1,7 +1,6 @@ -from _typeshed import Self from collections.abc import Iterator, MutableMapping from types import TracebackType -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = ["open", "whichdb", "error"] @@ -82,7 +81,7 @@ class _Database(MutableMapping[_KeyType, bytes]): def __iter__(self) -> Iterator[bytes]: ... def __len__(self) -> int: ... def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/dbm/dumb.pyi b/mypy/typeshed/stdlib/dbm/dumb.pyi index d65d163ab568..1fc68cf71f9d 100644 --- a/mypy/typeshed/stdlib/dbm/dumb.pyi +++ b/mypy/typeshed/stdlib/dbm/dumb.pyi @@ -1,7 +1,6 @@ -from _typeshed import Self from collections.abc import Iterator, MutableMapping from types import TracebackType -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = ["error", "open"] @@ -24,7 +23,7 @@ class _Database(MutableMapping[_KeyType, bytes]): def __iter__(self) -> Iterator[bytes]: ... def __len__(self) -> int: ... def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/dbm/gnu.pyi b/mypy/typeshed/stdlib/dbm/gnu.pyi index adaf6fa8e69b..3dc66a30c370 100644 --- a/mypy/typeshed/stdlib/dbm/gnu.pyi +++ b/mypy/typeshed/stdlib/dbm/gnu.pyi @@ -1,8 +1,8 @@ import sys -from _typeshed import ReadOnlyBuffer, Self +from _typeshed import ReadOnlyBuffer from types import TracebackType from typing import TypeVar, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias if sys.platform != "win32": _T = TypeVar("_T") @@ -24,7 +24,7 @@ if sys.platform != "win32": def __delitem__(self, key: _KeyType) -> None: ... def __contains__(self, key: _KeyType) -> bool: ... def __len__(self) -> int: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/dbm/ndbm.pyi b/mypy/typeshed/stdlib/dbm/ndbm.pyi index ac0b75dfa45b..1106fb2a8e7e 100644 --- a/mypy/typeshed/stdlib/dbm/ndbm.pyi +++ b/mypy/typeshed/stdlib/dbm/ndbm.pyi @@ -1,8 +1,8 @@ import sys -from _typeshed import ReadOnlyBuffer, Self +from _typeshed import ReadOnlyBuffer from types import TracebackType from typing import TypeVar, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias if sys.platform != "win32": _T = TypeVar("_T") @@ -20,7 +20,7 @@ if sys.platform != "win32": def __delitem__(self, key: _KeyType) -> None: ... def __len__(self) -> int: ... def __del__(self) -> None: ... 
- def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index ea837f09c806..ac0c5356f5f9 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -1,10 +1,9 @@ import sys import types -from _typeshed import Self from collections.abc import Callable, Iterator from opcode import * # `dis` re-exports it as a part of public API from typing import IO, Any, NamedTuple -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = [ "code_info", @@ -82,15 +81,13 @@ class Bytecode: adaptive: bool = False, ) -> None: ... @classmethod - def from_traceback( - cls: type[Self], tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False - ) -> Self: ... + def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ... else: def __init__( self, x: _HaveCodeType | str, *, first_line: int | None = None, current_offset: int | None = None ) -> None: ... @classmethod - def from_traceback(cls: type[Self], tb: types.TracebackType) -> Self: ... + def from_traceback(cls, tb: types.TracebackType) -> Self: ... def __iter__(self) -> Iterator[Instruction]: ... def info(self) -> str: ... diff --git a/mypy/typeshed/stdlib/distutils/ccompiler.pyi b/mypy/typeshed/stdlib/distutils/ccompiler.pyi index 711b30ba4e0e..e7277aa3f9c4 100644 --- a/mypy/typeshed/stdlib/distutils/ccompiler.pyi +++ b/mypy/typeshed/stdlib/distutils/ccompiler.pyi @@ -1,8 +1,8 @@ from collections.abc import Callable -from typing import Any, Union +from typing import Any from typing_extensions import TypeAlias -_Macro: TypeAlias = Union[tuple[str], tuple[str, str | None]] +_Macro: TypeAlias = tuple[str] | tuple[str, str | None] def gen_lib_options( compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] diff --git a/mypy/typeshed/stdlib/distutils/command/check.pyi b/mypy/typeshed/stdlib/distutils/command/check.pyi index cdbe40fff71d..9cbcc6c87f21 100644 --- a/mypy/typeshed/stdlib/distutils/command/check.pyi +++ b/mypy/typeshed/stdlib/distutils/command/check.pyi @@ -6,6 +6,8 @@ from ..cmd import Command _Reporter: TypeAlias = Any # really docutils.utils.Reporter # Only defined if docutils is installed. +# Depends on a third-party stub. Since distutils is deprecated anyway, +# it's easier to just suppress the "any subclassing" error. class SilentReporter(_Reporter): messages: Any def __init__( diff --git a/mypy/typeshed/stdlib/distutils/version.pyi b/mypy/typeshed/stdlib/distutils/version.pyi index 4f1b64a7381d..47da65ef87aa 100644 --- a/mypy/typeshed/stdlib/distutils/version.pyi +++ b/mypy/typeshed/stdlib/distutils/version.pyi @@ -1,36 +1,36 @@ -from _typeshed import Self from abc import abstractmethod from re import Pattern +from typing_extensions import Self class Version: def __eq__(self, other: object) -> bool: ... - def __lt__(self: Self, other: Self | str) -> bool: ... - def __le__(self: Self, other: Self | str) -> bool: ... - def __gt__(self: Self, other: Self | str) -> bool: ... - def __ge__(self: Self, other: Self | str) -> bool: ... + def __lt__(self, other: Self | str) -> bool: ... + def __le__(self, other: Self | str) -> bool: ... + def __gt__(self, other: Self | str) -> bool: ... + def __ge__(self, other: Self | str) -> bool: ... 
@abstractmethod def __init__(self, vstring: str | None = None) -> None: ... @abstractmethod - def parse(self: Self, vstring: str) -> Self: ... + def parse(self, vstring: str) -> Self: ... @abstractmethod def __str__(self) -> str: ... @abstractmethod - def _cmp(self: Self, other: Self | str) -> bool: ... + def _cmp(self, other: Self | str) -> bool: ... class StrictVersion(Version): version_re: Pattern[str] version: tuple[int, int, int] prerelease: tuple[str, int] | None def __init__(self, vstring: str | None = None) -> None: ... - def parse(self: Self, vstring: str) -> Self: ... + def parse(self, vstring: str) -> Self: ... def __str__(self) -> str: ... # noqa: Y029 - def _cmp(self: Self, other: Self | str) -> bool: ... + def _cmp(self, other: Self | str) -> bool: ... class LooseVersion(Version): component_re: Pattern[str] vstring: str version: tuple[str | int, ...] def __init__(self, vstring: str | None = None) -> None: ... - def parse(self: Self, vstring: str) -> Self: ... + def parse(self, vstring: str) -> Self: ... def __str__(self) -> str: ... # noqa: Y029 - def _cmp(self: Self, other: Self | str) -> bool: ... + def _cmp(self, other: Self | str) -> bool: ... diff --git a/mypy/typeshed/stdlib/email/__init__.pyi b/mypy/typeshed/stdlib/email/__init__.pyi index 6b59dc73d5cc..fca302f5f1a7 100644 --- a/mypy/typeshed/stdlib/email/__init__.pyi +++ b/mypy/typeshed/stdlib/email/__init__.pyi @@ -1,12 +1,12 @@ from collections.abc import Callable from email.message import Message from email.policy import Policy -from typing import IO, Union +from typing import IO from typing_extensions import TypeAlias # Definitions imported by multiple submodules in typeshed -_ParamType: TypeAlias = Union[str, tuple[str | None, str | None, str]] # noqa: Y047 -_ParamsType: TypeAlias = Union[str, None, tuple[str, str | None, str]] # noqa: Y047 +_ParamType: TypeAlias = str | tuple[str | None, str | None, str] # noqa: Y047 +_ParamsType: TypeAlias = str | None | tuple[str, str | None, str] # noqa: Y047 def message_from_string(s: str, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_bytes(s: bytes | bytearray, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... diff --git a/mypy/typeshed/stdlib/email/_header_value_parser.pyi b/mypy/typeshed/stdlib/email/_header_value_parser.pyi index 0e422294e77a..97008140ec5d 100644 --- a/mypy/typeshed/stdlib/email/_header_value_parser.pyi +++ b/mypy/typeshed/stdlib/email/_header_value_parser.pyi @@ -1,11 +1,10 @@ import sys -from _typeshed import Self from collections.abc import Iterable, Iterator from email.errors import HeaderParseError, MessageDefect from email.policy import Policy from re import Pattern from typing import Any -from typing_extensions import Final +from typing_extensions import Final, Self WSP: Final[set[str]] CFWS_LEADER: Final[set[str]] @@ -318,7 +317,7 @@ class Terminal(str): syntactic_break: bool token_type: str defects: list[MessageDefect] - def __new__(cls: type[Self], value: str, token_type: str) -> Self: ... + def __new__(cls, value: str, token_type: str) -> Self: ... def pprint(self) -> None: ... @property def all_defects(self) -> list[MessageDefect]: ... 
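A second recurring change, visible in `copyreg`, `ccompiler`, and `email/__init__` above, rewrites `typing.Union[...]` aliases using PEP 604 `X | Y` syntax. Stub files are never executed, so the new spelling is safe there on any Python version; an equivalent alias in a regular module only runs on Python 3.10+. A small sketch mirroring the `_Macro` alias from the `ccompiler` hunk:

    from typing_extensions import TypeAlias

    # Old spelling: _Macro: TypeAlias = Union[tuple[str], tuple[str, str | None]]
    # PEP 604 spelling used by the updated stubs (needs Python 3.10+ when executed):
    _Macro: TypeAlias = tuple[str] | tuple[str, str | None]
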
diff --git a/mypy/typeshed/stdlib/email/headerregistry.pyi b/mypy/typeshed/stdlib/email/headerregistry.pyi index df07e2458e81..e158e89818f7 100644 --- a/mypy/typeshed/stdlib/email/headerregistry.pyi +++ b/mypy/typeshed/stdlib/email/headerregistry.pyi @@ -1,6 +1,5 @@ import sys import types -from _typeshed import Self from collections.abc import Iterable, Mapping from datetime import datetime as _datetime from email._header_value_parser import ( @@ -15,7 +14,7 @@ from email._header_value_parser import ( from email.errors import MessageDefect from email.policy import Policy from typing import Any, ClassVar, Protocol -from typing_extensions import Literal +from typing_extensions import Literal, Self class BaseHeader(str): # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) @@ -24,7 +23,7 @@ class BaseHeader(str): def name(self) -> str: ... @property def defects(self) -> tuple[MessageDefect, ...]: ... - def __new__(cls: type[Self], name: str, value: Any) -> Self: ... + def __new__(cls, name: str, value: Any) -> Self: ... def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... def fold(self, *, policy: Policy) -> str: ... diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index 2777450a77ba..14e018073103 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -1,4 +1,3 @@ -from _typeshed import Self from collections.abc import Generator, Iterator, Sequence from email import _ParamsType, _ParamType from email.charset import Charset @@ -6,7 +5,7 @@ from email.contentmanager import ContentManager from email.errors import MessageDefect from email.policy import Policy from typing import Any, TypeVar, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = ["Message", "EmailMessage"] @@ -84,7 +83,7 @@ class Message: def get_charsets(self, failobj: None = None) -> list[str] | None: ... @overload def get_charsets(self, failobj: _T) -> list[str] | _T: ... - def walk(self: Self) -> Generator[Self, None, None]: ... + def walk(self) -> Generator[Self, None, None]: ... def get_content_disposition(self) -> str | None: ... def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy | None = None) -> str: ... def as_bytes(self, unixfrom: bool = False, policy: Policy | None = None) -> bytes: ... diff --git a/mypy/typeshed/stdlib/email/parser.pyi b/mypy/typeshed/stdlib/email/parser.pyi index ba5dace28916..28b6aca856ca 100644 --- a/mypy/typeshed/stdlib/email/parser.pyi +++ b/mypy/typeshed/stdlib/email/parser.pyi @@ -1,25 +1,26 @@ +from _typeshed import SupportsRead from collections.abc import Callable from email.feedparser import BytesFeedParser as BytesFeedParser, FeedParser as FeedParser from email.message import Message from email.policy import Policy -from typing import BinaryIO, TextIO +from typing import IO __all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedParser", "BytesFeedParser"] class Parser: def __init__(self, _class: Callable[[], Message] | None = None, *, policy: Policy = ...) -> None: ... - def parse(self, fp: TextIO, headersonly: bool = False) -> Message: ... + def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> Message: ... def parsestr(self, text: str, headersonly: bool = False) -> Message: ... 
class HeaderParser(Parser): - def parse(self, fp: TextIO, headersonly: bool = True) -> Message: ... + def parse(self, fp: SupportsRead[str], headersonly: bool = True) -> Message: ... def parsestr(self, text: str, headersonly: bool = True) -> Message: ... class BytesParser: def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... - def parse(self, fp: BinaryIO, headersonly: bool = False) -> Message: ... + def parse(self, fp: IO[bytes], headersonly: bool = False) -> Message: ... def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> Message: ... class BytesHeaderParser(BytesParser): - def parse(self, fp: BinaryIO, headersonly: bool = True) -> Message: ... + def parse(self, fp: IO[bytes], headersonly: bool = True) -> Message: ... def parsebytes(self, text: bytes | bytearray, headersonly: bool = True) -> Message: ... diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index 182076731ab2..b46fe429cacb 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -1,11 +1,12 @@ +import _typeshed import sys import types -from _typeshed import Self, SupportsKeysAndGetItem, Unused +from _typeshed import SupportsKeysAndGetItem, Unused from abc import ABCMeta from builtins import property as _builtins_property from collections.abc import Iterable, Iterator, Mapping from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] @@ -80,7 +81,7 @@ class _EnumDict(dict[str, Any]): class EnumMeta(ABCMeta): if sys.version_info >= (3, 11): def __new__( - metacls: type[Self], + metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, @@ -88,11 +89,13 @@ class EnumMeta(ABCMeta): boundary: FlagBoundary | None = None, _simple: bool = False, **kwds: Any, - ) -> Self: ... + ) -> _typeshed.Self: ... elif sys.version_info >= (3, 9): - def __new__(metacls: type[Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any) -> Self: ... + def __new__( + metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any + ) -> _typeshed.Self: ... else: - def __new__(metacls: type[Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> Self: ... + def __new__(metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> _typeshed.Self: ... if sys.version_info >= (3, 9): @classmethod @@ -174,7 +177,7 @@ class Enum(metaclass=EnumMeta): # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` # (see #7752, #2539, mypy/#5788), # and in practice using `object` here has the same effect as using `Any`. - def __new__(cls: type[Self], value: object) -> Self: ... + def __new__(cls, value: object) -> Self: ... def __dir__(self) -> list[str]: ... def __format__(self, format_spec: str) -> str: ... def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... @@ -191,7 +194,7 @@ class IntEnum(int, _IntEnumBase): _value_: int @_magic_enum_attr def value(self) -> int: ... - def __new__(cls: type[Self], value: int) -> Self: ... + def __new__(cls, value: int) -> Self: ... def unique(enumeration: _EnumerationT) -> _EnumerationT: ... @@ -202,7 +205,7 @@ class auto(IntFlag): _value_: Any @_magic_enum_attr def value(self) -> Any: ... - def __new__(cls: type[Self]) -> Self: ... + def __new__(cls) -> Self: ... 
class Flag(Enum): _name_: str | None # type: ignore[assignment] @@ -211,14 +214,14 @@ class Flag(Enum): def name(self) -> str | None: ... # type: ignore[override] @_magic_enum_attr def value(self) -> int: ... - def __contains__(self: Self, other: Self) -> bool: ... + def __contains__(self, other: Self) -> bool: ... def __bool__(self) -> bool: ... - def __or__(self: Self, other: Self) -> Self: ... - def __and__(self: Self, other: Self) -> Self: ... - def __xor__(self: Self, other: Self) -> Self: ... - def __invert__(self: Self) -> Self: ... + def __or__(self, other: Self) -> Self: ... + def __and__(self, other: Self) -> Self: ... + def __xor__(self, other: Self) -> Self: ... + def __invert__(self) -> Self: ... if sys.version_info >= (3, 11): - def __iter__(self: Self) -> Iterator[Self]: ... + def __iter__(self) -> Iterator[Self]: ... def __len__(self) -> int: ... __ror__ = __or__ __rand__ = __and__ @@ -226,28 +229,28 @@ class Flag(Enum): if sys.version_info >= (3, 11): # The body of the class is the same, but the base classes are different. - class IntFlag(int, ReprEnum, Flag, boundary=KEEP): - def __new__(cls: type[Self], value: int) -> Self: ... - def __or__(self: Self, other: int) -> Self: ... - def __and__(self: Self, other: int) -> Self: ... - def __xor__(self: Self, other: int) -> Self: ... + class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ else: - class IntFlag(int, Flag): - def __new__(cls: type[Self], value: int) -> Self: ... - def __or__(self: Self, other: int) -> Self: ... - def __and__(self: Self, other: int) -> Self: ... - def __xor__(self: Self, other: int) -> Self: ... + class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ if sys.version_info >= (3, 11): class StrEnum(str, ReprEnum): - def __new__(cls: type[Self], value: str) -> Self: ... + def __new__(cls, value: str) -> Self: ... _value_: str @_magic_enum_attr def value(self) -> str: ... diff --git a/mypy/typeshed/stdlib/fileinput.pyi b/mypy/typeshed/stdlib/fileinput.pyi index 17379e92ba5f..e9f3713b4eaf 100644 --- a/mypy/typeshed/stdlib/fileinput.pyi +++ b/mypy/typeshed/stdlib/fileinput.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import AnyStr_co, Self, StrOrBytesPath +from _typeshed import AnyStr_co, StrOrBytesPath from collections.abc import Callable, Iterable, Iterator from types import TracebackType from typing import IO, Any, AnyStr, Generic, Protocol, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -289,11 +289,11 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): def __del__(self) -> None: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> AnyStr: ... 
if sys.version_info < (3, 11): def __getitem__(self, i: int) -> AnyStr: ... diff --git a/mypy/typeshed/stdlib/fractions.pyi b/mypy/typeshed/stdlib/fractions.pyi index 95e4aad0f9ca..97cefc916d9b 100644 --- a/mypy/typeshed/stdlib/fractions.pyi +++ b/mypy/typeshed/stdlib/fractions.pyi @@ -1,10 +1,9 @@ import sys -from _typeshed import Self from collections.abc import Callable from decimal import Decimal from numbers import Integral, Rational, Real from typing import Any, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias _ComparableNum: TypeAlias = int | float | Decimal | Real @@ -24,14 +23,14 @@ else: class Fraction(Rational): @overload def __new__( - cls: type[Self], numerator: int | Rational = 0, denominator: int | Rational | None = None, *, _normalize: bool = True + cls, numerator: int | Rational = 0, denominator: int | Rational | None = None, *, _normalize: bool = True ) -> Self: ... @overload - def __new__(cls: type[Self], __value: float | Decimal | str, *, _normalize: bool = True) -> Self: ... + def __new__(cls, __value: float | Decimal | str, *, _normalize: bool = True) -> Self: ... @classmethod - def from_float(cls: type[Self], f: float) -> Self: ... + def from_float(cls, f: float) -> Self: ... @classmethod - def from_decimal(cls: type[Self], dec: Decimal) -> Self: ... + def from_decimal(cls, dec: Decimal) -> Self: ... def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> tuple[int, int]: ... @@ -139,8 +138,8 @@ class Fraction(Rational): def __le__(a, b: _ComparableNum) -> bool: ... def __ge__(a, b: _ComparableNum) -> bool: ... def __bool__(a) -> bool: ... - def __copy__(self: Self) -> Self: ... - def __deepcopy__(self: Self, memo: Any) -> Self: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... if sys.version_info >= (3, 11): def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: ... # Not actually defined within fractions.py, but provides more useful diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi index 6c33f1409822..76d9dc02a5da 100644 --- a/mypy/typeshed/stdlib/ftplib.pyi +++ b/mypy/typeshed/stdlib/ftplib.pyi @@ -1,11 +1,11 @@ import sys -from _typeshed import Self, SupportsRead, SupportsReadline +from _typeshed import SupportsRead, SupportsReadline from collections.abc import Callable, Iterable, Iterator from socket import socket from ssl import SSLContext from types import TracebackType from typing import Any, TextIO -from typing_extensions import Literal +from typing_extensions import Literal, Self __all__ = ["FTP", "error_reply", "error_temp", "error_perm", "error_proto", "all_errors", "FTP_TLS"] @@ -36,7 +36,7 @@ class FTP: lastresp: str file: TextIO | None encoding: str - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... 
diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 8778798144de..1214e349f605 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -1,9 +1,9 @@ import sys import types -from _typeshed import IdentityFunction, Self, SupportsAllComparisons, SupportsItems +from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -79,7 +79,7 @@ class partial(Generic[_T]): def args(self) -> tuple[Any, ...]: ... @property def keywords(self) -> dict[str, Any]: ... - def __new__(cls: type[Self], __func: Callable[..., _T], *args: Any, **kwargs: Any) -> Self: ... + def __new__(cls, __func: Callable[..., _T], *args: Any, **kwargs: Any) -> Self: ... def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -111,11 +111,11 @@ class _SingleDispatchCallable(Generic[_T]): # @fun.register(complex) # def _(arg, verbose=False): ... @overload - def register(self, cls: type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: type[Any], func: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... # @fun.register # def _(arg: int, verbose=False): @overload - def register(self, cls: Callable[..., _T], func: None = ...) -> Callable[..., _T]: ... + def register(self, cls: Callable[..., _T], func: None = None) -> Callable[..., _T]: ... # fun.register(int, lambda x: x) @overload def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... diff --git a/mypy/typeshed/stdlib/hashlib.pyi b/mypy/typeshed/stdlib/hashlib.pyi index 8292e319330a..18b1ab549764 100644 --- a/mypy/typeshed/stdlib/hashlib.pyi +++ b/mypy/typeshed/stdlib/hashlib.pyi @@ -1,8 +1,8 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer from collections.abc import Callable, Set as AbstractSet from typing import Protocol -from typing_extensions import final +from typing_extensions import Self, final if sys.version_info >= (3, 11): __all__ = ( @@ -56,7 +56,7 @@ class _Hash: @property def name(self) -> str: ... def __init__(self, data: ReadableBuffer = ...) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... def update(self, __data: ReadableBuffer) -> None: ... 
diff --git a/mypy/typeshed/stdlib/heapq.pyi b/mypy/typeshed/stdlib/heapq.pyi index 9d7815507ea5..61418b3704d6 100644 --- a/mypy/typeshed/stdlib/heapq.pyi +++ b/mypy/typeshed/stdlib/heapq.pyi @@ -2,12 +2,13 @@ from _heapq import * from _typeshed import SupportsRichComparison from collections.abc import Callable, Iterable from typing import Any, TypeVar +from typing_extensions import Final __all__ = ["heappush", "heappop", "heapify", "heapreplace", "merge", "nlargest", "nsmallest", "heappushpop"] _S = TypeVar("_S") -__about__: str +__about__: Final[str] def merge( *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index bb641875e55b..b1506b50e750 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -2,11 +2,11 @@ import email.message import io import ssl import types -from _typeshed import ReadableBuffer, Self, SupportsRead, WriteableBuffer +from _typeshed import ReadableBuffer, SupportsRead, WriteableBuffer from collections.abc import Callable, Iterable, Iterator, Mapping from socket import socket from typing import Any, BinaryIO, TypeVar, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = [ "HTTPResponse", @@ -127,7 +127,7 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): def getheaders(self) -> list[tuple[str, str]]: ... def isclosed(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi index 8016d8bec5cd..1c2112dd37c8 100644 --- a/mypy/typeshed/stdlib/imaplib.pyi +++ b/mypy/typeshed/stdlib/imaplib.pyi @@ -1,7 +1,7 @@ import subprocess import sys import time -from _typeshed import ReadableBuffer, Self, _BufferWithLen +from _typeshed import ReadableBuffer, _BufferWithLen from builtins import list as _list # conflicts with a method named "list" from collections.abc import Callable from datetime import datetime @@ -10,7 +10,7 @@ from socket import socket as _socket from ssl import SSLContext, SSLSocket from types import TracebackType from typing import IO, Any, SupportsAbs, SupportsInt -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] @@ -69,7 +69,7 @@ class IMAP4: def delete(self, mailbox: str) -> _CommandResults: ... def deleteacl(self, mailbox: str, who: str) -> _CommandResults: ... def enable(self, capability: str) -> _CommandResults: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def expunge(self) -> _CommandResults: ... def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: ... 
diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index 78b79267d06e..3d0c2d38c4e9 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -133,7 +133,7 @@ if sys.version_info >= (3, 9): @overload @abstractmethod def open( - self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None ) -> FileIO: ... # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @overload @@ -142,9 +142,9 @@ if sys.version_info >= (3, 9): self, mode: OpenBinaryModeUpdating, buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedRandom: ... @overload @abstractmethod @@ -152,9 +152,9 @@ if sys.version_info >= (3, 9): self, mode: OpenBinaryModeWriting, buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedWriter: ... @overload @abstractmethod @@ -162,15 +162,15 @@ if sys.version_info >= (3, 9): self, mode: OpenBinaryModeReading, buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedReader: ... # Buffering cannot be determined: fall back to BinaryIO @overload @abstractmethod def open( - self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = None, errors: None = None, newline: None = None ) -> BinaryIO: ... # Fallback if mode is not specified @overload diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi index cc93aaeca365..083453cd3c9a 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -1,7 +1,7 @@ import abc import pathlib import sys -from _typeshed import Self, StrPath +from _typeshed import StrPath from collections.abc import Iterable, Mapping from email.message import Message from importlib.abc import MetaPathFinder @@ -9,6 +9,7 @@ from os import PathLike from pathlib import Path from re import Pattern from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Self __all__ = [ "Distribution", @@ -86,13 +87,13 @@ if sys.version_info >= (3, 10): class SelectableGroups(dict[str, EntryPoints]): # use as dict is deprecated since 3.10 @classmethod - def load(cls: type[Self], eps: Iterable[EntryPoint]) -> Self: ... + def load(cls, eps: Iterable[EntryPoint]) -> Self: ... @property def groups(self) -> set[str]: ... @property def names(self) -> set[str]: ... @overload - def select(self: Self) -> Self: ... # type: ignore[misc] + def select(self) -> Self: ... # type: ignore[misc] @overload def select( self, @@ -132,7 +133,7 @@ class Distribution: @overload @classmethod def discover( - cls, *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any + cls, *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... @staticmethod def at(path: StrPath) -> PathDistribution: ... 
@@ -185,7 +186,7 @@ def distribution(distribution_name: str) -> Distribution: ... def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... @overload def distributions( - *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any + *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... if sys.version_info >= (3, 10): diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi index 3b82e0b0af2a..2525ef4968ec 100644 --- a/mypy/typeshed/stdlib/inspect.pyi +++ b/mypy/typeshed/stdlib/inspect.pyi @@ -2,7 +2,6 @@ import dis import enum import sys import types -from _typeshed import Self from collections import OrderedDict from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine, Generator, Mapping, Sequence, Set as AbstractSet from types import ( @@ -25,8 +24,8 @@ from types import ( TracebackType, WrapperDescriptorType, ) -from typing import Any, ClassVar, NamedTuple, Protocol, TypeVar, Union, overload -from typing_extensions import Literal, ParamSpec, TypeAlias, TypeGuard +from typing import Any, ClassVar, NamedTuple, Protocol, TypeVar, overload +from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypeGuard if sys.version_info >= (3, 11): __all__ = [ @@ -264,9 +263,9 @@ def isdatadescriptor(object: object) -> TypeGuard[_SupportsSet[Any, Any] | _Supp # # Retrieving source code # -_SourceObjectType: TypeAlias = Union[ - ModuleType, type[Any], MethodType, FunctionType, TracebackType, FrameType, CodeType, Callable[..., Any] -] +_SourceObjectType: TypeAlias = ( + ModuleType | type[Any] | MethodType | FunctionType | TracebackType | FrameType | CodeType | Callable[..., Any] +) def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ... @@ -313,13 +312,11 @@ class Signature: def return_annotation(self) -> Any: ... def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... - def replace( - self: Self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ... - ) -> Self: ... + def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: ... if sys.version_info >= (3, 10): @classmethod def from_callable( - cls: type[Self], + cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True, @@ -329,7 +326,7 @@ class Signature: ) -> Self: ... else: @classmethod - def from_callable(cls: type[Self], obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... + def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... def __eq__(self, other: object) -> bool: ... @@ -372,7 +369,7 @@ class Parameter: @property def annotation(self) -> Any: ... 
def replace( - self: Self, + self, *, name: str | type[_void] = ..., kind: _ParameterKind | type[_void] = ..., @@ -493,7 +490,7 @@ if sys.version_info >= (3, 11): class Traceback(_Traceback): positions: dis.Positions | None def __new__( - cls: type[Self], + cls, filename: str, lineno: int, function: str, @@ -514,7 +511,7 @@ if sys.version_info >= (3, 11): class FrameInfo(_FrameInfo): positions: dis.Positions | None def __new__( - cls: type[Self], + cls, frame: FrameType, filename: str, lineno: int, diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index 6e1b4be77b07..c3e07bacbe5a 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -2,12 +2,12 @@ import abc import builtins import codecs import sys -from _typeshed import FileDescriptorOrPath, ReadableBuffer, Self, WriteableBuffer +from _typeshed import FileDescriptorOrPath, ReadableBuffer, WriteableBuffer from collections.abc import Callable, Iterable, Iterator from os import _Opener from types import TracebackType from typing import IO, Any, BinaryIO, TextIO -from typing_extensions import Literal +from typing_extensions import Literal, Self __all__ = [ "BlockingIOError", @@ -51,7 +51,7 @@ class UnsupportedOperation(OSError, ValueError): ... class IOBase(metaclass=abc.ABCMeta): def __iter__(self) -> Iterator[bytes]: ... def __next__(self) -> bytes: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -100,7 +100,7 @@ class FileIO(RawIOBase, BinaryIO): def closefd(self) -> bool: ... def write(self, __b: ReadableBuffer) -> int: ... def read(self, __size: int = -1) -> bytes: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... class BytesIO(BufferedIOBase, BinaryIO): def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ... @@ -108,23 +108,23 @@ class BytesIO(BufferedIOBase, BinaryIO): # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def getvalue(self) -> bytes: ... def getbuffer(self) -> memoryview: ... def read1(self, __size: int | None = -1) -> bytes: ... class BufferedReader(BufferedIOBase, BinaryIO): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def peek(self, __size: int = 0) -> bytes: ... class BufferedWriter(BufferedIOBase, BinaryIO): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def write(self, __buffer: ReadableBuffer) -> int: ... class BufferedRandom(BufferedReader, BufferedWriter): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def seek(self, __target: int, __whence: int = 0) -> int: ... # stubtest needs this class BufferedRWPair(BufferedIOBase): @@ -172,7 +172,7 @@ class TextIOWrapper(TextIOBase, TextIO): write_through: bool | None = None, ) -> None: ... # These are inherited from TextIOBase, but must exist in the stub to satisfy mypy. - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __iter__(self) -> Iterator[str]: ... # type: ignore[override] def __next__(self) -> str: ... # type: ignore[override] def writelines(self, __lines: Iterable[str]) -> None: ... 
# type: ignore[override] diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index 1de945db5d30..9f9662137765 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Container, Iterable, Iterator from typing import Any, Generic, SupportsInt, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias # Undocumented length constants IPV4LENGTH: Literal[32] @@ -34,20 +33,20 @@ class _IPAddressBase: class _BaseAddress(_IPAddressBase, SupportsInt): def __init__(self, address: object) -> None: ... - def __add__(self: Self, other: int) -> Self: ... + def __add__(self, other: int) -> Self: ... def __int__(self) -> int: ... - def __sub__(self: Self, other: int) -> Self: ... + def __sub__(self, other: int) -> Self: ... def __format__(self, fmt: str) -> str: ... def __eq__(self, other: object) -> bool: ... - def __lt__(self: Self, other: Self) -> bool: ... + def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): - def __ge__(self: Self, other: Self) -> bool: ... - def __gt__(self: Self, other: Self) -> bool: ... - def __le__(self: Self, other: Self) -> bool: ... + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... else: - def __ge__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __gt__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __le__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... @property def is_global(self) -> bool: ... @@ -76,20 +75,20 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def __getitem__(self, n: int) -> _A: ... def __iter__(self) -> Iterator[_A]: ... def __eq__(self, other: object) -> bool: ... - def __lt__(self: Self, other: Self) -> bool: ... + def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): - def __ge__(self: Self, other: Self) -> bool: ... - def __gt__(self: Self, other: Self) -> bool: ... - def __le__(self: Self, other: Self) -> bool: ... + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... else: - def __ge__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __gt__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __le__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... - def address_exclude(self: Self, other: Self) -> Iterator[Self]: ... + def address_exclude(self, other: Self) -> Iterator[Self]: ... @property def broadcast_address(self) -> _A: ... - def compare_networks(self: Self, other: Self) -> int: ... + def compare_networks(self, other: Self) -> int: ... def hosts(self) -> Iterator[_A]: ... @property def is_global(self) -> bool: ... 
@@ -112,10 +111,10 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: ... @property def prefixlen(self) -> int: ... - def subnet_of(self: Self, other: Self) -> bool: ... - def supernet_of(self: Self, other: Self) -> bool: ... - def subnets(self: Self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... - def supernet(self: Self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... + def subnet_of(self, other: Self) -> bool: ... + def supernet_of(self, other: Self) -> bool: ... + def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... + def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... @property def with_hostmask(self) -> str: ... @property diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index a16827a3adb8..c7b92c3aebb5 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Callable, Iterable, Iterator from typing import Any, Generic, SupportsComplex, SupportsFloat, SupportsInt, TypeVar, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -32,12 +31,12 @@ class count(Iterator[_N], Generic[_N]): @overload def __new__(cls, *, step: _N) -> count[_N]: ... def __next__(self) -> _N: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... class cycle(Iterator[_T], Generic[_T]): def __init__(self, __iterable: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... class repeat(Iterator[_T], Generic[_T]): @overload @@ -45,25 +44,25 @@ class repeat(Iterator[_T], Generic[_T]): @overload def __init__(self, object: _T, times: int) -> None: ... def __next__(self) -> _T: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __length_hint__(self) -> int: ... class accumulate(Iterator[_T], Generic[_T]): if sys.version_info >= (3, 8): @overload - def __init__(self, iterable: Iterable[_T], func: None = ..., *, initial: _T | None = ...) -> None: ... + def __init__(self, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> None: ... @overload def __init__(self, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> None: ... else: def __init__(self, iterable: Iterable[_T], func: Callable[[_T, _T], _T] | None = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class chain(Iterator[_T], Generic[_T]): def __init__(self, *iterables: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... @classmethod # We use type[Any] and not type[_S] to not lose the type inference from __iterable def from_iterable(cls: type[Any], __iterable: Iterable[Iterable[_S]]) -> chain[_S]: ... @@ -72,25 +71,25 @@ class chain(Iterator[_T], Generic[_T]): class compress(Iterator[_T], Generic[_T]): def __init__(self, data: Iterable[_T], selectors: Iterable[Any]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... 
class dropwhile(Iterator[_T], Generic[_T]): def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class filterfalse(Iterator[_T], Generic[_T]): def __init__(self, __predicate: _Predicate[_T] | None, __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class groupby(Iterator[tuple[_T, Iterator[_S]]], Generic[_T, _S]): @overload - def __new__(cls, iterable: Iterable[_T1], key: None = ...) -> groupby[_T1, _T1]: ... + def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ... @overload def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[_T, Iterator[_S]]: ... class islice(Iterator[_T], Generic[_T]): @@ -98,17 +97,17 @@ class islice(Iterator[_T], Generic[_T]): def __init__(self, __iterable: Iterable[_T], __stop: int | None) -> None: ... @overload def __init__(self, __iterable: Iterable[_T], __start: int | None, __stop: int | None, __step: int | None = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class starmap(Iterator[_T], Generic[_T]): def __init__(self, __function: Callable[..., _T], __iterable: Iterable[Iterable[Any]]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class takewhile(Iterator[_T], Generic[_T]): def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... def tee(__iterable: Iterable[_T], __n: int = 2) -> tuple[Iterator[_T], ...]: ... @@ -190,7 +189,7 @@ class zip_longest(Iterator[_T_co], Generic[_T_co]): *iterables: Iterable[_T], fillvalue: _T, ) -> zip_longest[tuple[_T, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... class product(Iterator[_T_co], Generic[_T_co]): @@ -239,12 +238,12 @@ class product(Iterator[_T_co], Generic[_T_co]): def __new__(cls, *iterables: Iterable[_T1], repeat: int) -> product[tuple[_T1, ...]]: ... @overload def __new__(cls, *iterables: Iterable[Any], repeat: int = ...) -> product[tuple[Any, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... class permutations(Iterator[tuple[_T, ...]], Generic[_T]): def __init__(self, iterable: Iterable[_T], r: int | None = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[_T, ...]: ... class combinations(Iterator[_T_co], Generic[_T_co]): @@ -258,22 +257,22 @@ class combinations(Iterator[_T_co], Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[tuple[_T, _T, _T, _T, _T]]: ... @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... class combinations_with_replacement(Iterator[tuple[_T, ...]], Generic[_T]): def __init__(self, iterable: Iterable[_T], r: int) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... 
def __next__(self) -> tuple[_T, ...]: ... if sys.version_info >= (3, 10): class pairwise(Iterator[_T_co], Generic[_T_co]): def __new__(cls, __iterable: Iterable[_T]) -> pairwise[tuple[_T, _T]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... if sys.version_info >= (3, 12): class batched(Iterator[_T_co], Generic[_T_co]): - def __new__(cls: type[Self], iterable: Iterable[_T_co], n: int) -> Self: ... - def __iter__(self: Self) -> Self: ... + def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[_T_co, ...]: ... diff --git a/mypy/typeshed/stdlib/keyword.pyi b/mypy/typeshed/stdlib/keyword.pyi index c17c58012fd1..46c386048858 100644 --- a/mypy/typeshed/stdlib/keyword.pyi +++ b/mypy/typeshed/stdlib/keyword.pyi @@ -1,5 +1,6 @@ import sys from collections.abc import Sequence +from typing_extensions import Final if sys.version_info >= (3, 9): __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] @@ -8,8 +9,13 @@ else: def iskeyword(s: str) -> bool: ... -kwlist: Sequence[str] +# a list at runtime, but you're not meant to mutate it; +# type it as a sequence +kwlist: Final[Sequence[str]] if sys.version_info >= (3, 9): def issoftkeyword(s: str) -> bool: ... - softkwlist: Sequence[str] + + # a list at runtime, but you're not meant to mutate it; + # type it as a sequence + softkwlist: Final[Sequence[str]] diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi index aa0dd687659d..bef0a7922683 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi @@ -1,5 +1,5 @@ -from _typeshed import Self, StrPath -from typing_extensions import TypeAlias +from _typeshed import StrPath +from typing_extensions import Self, TypeAlias _Label: TypeAlias = tuple[int, str | None] _DFA: TypeAlias = list[list[tuple[int, int]]] @@ -17,7 +17,7 @@ class Grammar: start: int def dump(self, filename: StrPath) -> None: ... def load(self, filename: StrPath) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def report(self) -> None: ... opmap_raw: str diff --git a/mypy/typeshed/stdlib/lib2to3/pytree.pyi b/mypy/typeshed/stdlib/lib2to3/pytree.pyi index 5cf7db146e46..4f756c9768db 100644 --- a/mypy/typeshed/stdlib/lib2to3/pytree.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pytree.pyi @@ -1,8 +1,7 @@ -from _typeshed import Self from collections.abc import Iterator from lib2to3.pgen2.grammar import Grammar from typing import Any -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias _NL: TypeAlias = Node | Leaf _Context: TypeAlias = tuple[str, int, int] @@ -21,8 +20,8 @@ class Base: was_changed: bool was_checked: bool def __eq__(self, other: object) -> bool: ... - def _eq(self: Self, other: Self) -> bool: ... - def clone(self: Self) -> Self: ... + def _eq(self, other: Self) -> bool: ... + def clone(self) -> Self: ... def post_order(self) -> Iterator[_NL]: ... def pre_order(self) -> Iterator[_NL]: ... def replace(self, new: _NL | list[_NL]) -> None: ... 
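The hunks above and below all apply the same mechanical rewrite: the typeshed-internal `Self` TypeVar imported from `_typeshed`, which had to be spelled out on the `self`/`cls` parameter, gives way to PEP 673's `Self` from `typing_extensions`, which binds to the current class implicitly. A minimal stub-style sketch of the two spellings, using a hypothetical `Builder` class that is not part of this patch (`_typeshed` is only visible to type checkers, so this is illustration, not runnable module code):

    # Old convention: a bound TypeVar named Self, annotated on self/cls.
    from _typeshed import Self

    class OldBuilder:
        def add(self: Self, item: str) -> Self: ...
        @classmethod
        def create(cls: type[Self]) -> Self: ...

    # New convention used throughout this patch: PEP 673 Self; self/cls
    # stay unannotated and the return type still narrows in subclasses.
    from typing_extensions import Self

    class NewBuilder:
        def add(self, item: str) -> Self: ...
        @classmethod
        def create(cls) -> Self: ...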
diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi index 231700653a32..c74afa45ded1 100644 --- a/mypy/typeshed/stdlib/logging/__init__.pyi +++ b/mypy/typeshed/stdlib/logging/__init__.pyi @@ -1,14 +1,14 @@ import sys import threading -from _typeshed import Self, StrPath, SupportsWrite +from _typeshed import StrPath, SupportsWrite from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from io import TextIOWrapper from re import Pattern from string import Template from time import struct_time from types import FrameType, TracebackType -from typing import Any, ClassVar, Generic, TextIO, TypeVar, Union, overload -from typing_extensions import Literal, TypeAlias +from typing import Any, ClassVar, Generic, TextIO, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 11): from types import GenericAlias @@ -61,7 +61,7 @@ __all__ = [ if sys.version_info >= (3, 11): __all__ += ["getLevelNamesMapping"] -_SysExcInfoType: TypeAlias = Union[tuple[type[BaseException], BaseException, TracebackType | None], tuple[None, None, None]] +_SysExcInfoType: TypeAlias = tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None] _ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException _ArgsType: TypeAlias = tuple[object, ...] | Mapping[str, object] _FilterType: TypeAlias = Filter | Callable[[LogRecord], bool] @@ -110,7 +110,7 @@ class Logger(Filterer): def setLevel(self, level: _Level) -> None: ... def isEnabledFor(self, level: int) -> bool: ... def getEffectiveLevel(self) -> int: ... - def getChild(self: Self, suffix: str) -> Self: ... # see python/typing#980 + def getChild(self, suffix: str) -> Self: ... # see python/typing#980 if sys.version_info >= (3, 8): def debug( self, diff --git a/mypy/typeshed/stdlib/lzma.pyi b/mypy/typeshed/stdlib/lzma.pyi index 2feb28a8e743..34bd6f3f8db1 100644 --- a/mypy/typeshed/stdlib/lzma.pyi +++ b/mypy/typeshed/stdlib/lzma.pyi @@ -1,8 +1,8 @@ import io -from _typeshed import ReadableBuffer, Self, StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Mapping, Sequence from typing import IO, Any, TextIO, overload -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, final __all__ = [ "CHECK_NONE", @@ -115,7 +115,7 @@ class LZMAFile(io.BufferedIOBase, IO[bytes]): preset: int | None = None, filters: _FilterChain | None = None, ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def peek(self, size: int = -1) -> bytes: ... def read(self, size: int | None = -1) -> bytes: ... def read1(self, size: int = -1) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/mailbox.pyi b/mypy/typeshed/stdlib/mailbox.pyi index 2fe9060e7b7c..8053fad88ea5 100644 --- a/mypy/typeshed/stdlib/mailbox.pyi +++ b/mypy/typeshed/stdlib/mailbox.pyi @@ -1,12 +1,12 @@ import email.message import io import sys -from _typeshed import Self, StrPath, SupportsNoArgReadline, SupportsRead +from _typeshed import StrPath, SupportsNoArgReadline, SupportsRead from abc import ABCMeta, abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from types import TracebackType from typing import IO, Any, AnyStr, Generic, Protocol, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -235,7 +235,7 @@ class _ProxyFile(Generic[AnyStr]): def tell(self) -> int: ... def seek(self, offset: int, whence: int = 0) -> None: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/marshal.pyi b/mypy/typeshed/stdlib/marshal.pyi index da5d1a95a6f6..21f05c908479 100644 --- a/mypy/typeshed/stdlib/marshal.pyi +++ b/mypy/typeshed/stdlib/marshal.pyi @@ -1,31 +1,31 @@ import builtins import types from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite -from typing import Any, Union +from typing import Any from typing_extensions import TypeAlias version: int -_Marshallable: TypeAlias = Union[ +_Marshallable: TypeAlias = ( # handled in w_object() in marshal.c - None, - type[StopIteration], - builtins.ellipsis, - bool, + None + | type[StopIteration] + | builtins.ellipsis + | bool # handled in w_complex_object() in marshal.c - int, - float, - complex, - bytes, - str, - tuple[_Marshallable, ...], - list[Any], - dict[Any, Any], - set[Any], - frozenset[_Marshallable], - types.CodeType, - ReadableBuffer, -] + | int + | float + | complex + | bytes + | str + | tuple[_Marshallable, ...] + | list[Any] + | dict[Any, Any] + | set[Any] + | frozenset[_Marshallable] + | types.CodeType + | ReadableBuffer +) def dump(__value: _Marshallable, __file: SupportsWrite[bytes], __version: int = 4) -> None: ... def load(__file: SupportsRead[bytes]) -> Any: ... diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 273cd0c6f4d4..c74ad3cda6db 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -1,7 +1,8 @@ import sys -from _typeshed import ReadableBuffer, Self, Unused +from _typeshed import ReadableBuffer, Unused from collections.abc import Iterable, Iterator, Sized from typing import NoReturn, overload +from typing_extensions import Self ACCESS_DEFAULT: int ACCESS_READ: int @@ -73,7 +74,7 @@ class mmap(Iterable[int], Sized): # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and __len__, # so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[int]: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... 
if sys.version_info >= (3, 8) and sys.platform != "win32": diff --git a/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/connection.pyi index 392e3168aaaa..d034373712e0 100644 --- a/mypy/typeshed/stdlib/multiprocessing/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/connection.pyi @@ -1,15 +1,15 @@ import socket import sys import types -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer from collections.abc import Iterable -from typing import Any, Union -from typing_extensions import SupportsIndex, TypeAlias +from typing import Any +from typing_extensions import Self, SupportsIndex, TypeAlias __all__ = ["Client", "Listener", "Pipe", "wait"] # https://docs.python.org/3/library/multiprocessing.html#address-formats -_Address: TypeAlias = Union[str, tuple[str, int]] +_Address: TypeAlias = str | tuple[str, int] class _ConnectionBase: def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @@ -27,7 +27,7 @@ class _ConnectionBase: def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... def recv(self) -> Any: ... def poll(self, timeout: float | None = 0.0) -> bool: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -47,7 +47,7 @@ class Listener: def address(self) -> _Address: ... @property def last_accepted(self) -> _Address | None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi index 1630472b3b06..fcd03a657319 100644 --- a/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi @@ -1,14 +1,13 @@ -from _typeshed import Self from queue import Queue from types import TracebackType -from typing import Any, Union -from typing_extensions import TypeAlias +from typing import Any +from typing_extensions import Self, TypeAlias __all__ = ["Client", "Listener", "Pipe"] families: list[None] -_Address: TypeAlias = Union[str, tuple[str, int]] +_Address: TypeAlias = str | tuple[str, int] class Connection: _in: Any @@ -17,7 +16,7 @@ class Connection: recv_bytes: Any send: Any send_bytes: Any - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -29,7 +28,7 @@ class Listener: _backlog_queue: Queue[Any] | None @property def address(self) -> Queue[Any] | None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... 
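These hunks also retire `typing.Union` inside alias definitions in favour of PEP 604 `X | Y` syntax, as in the `_Address` alias of `multiprocessing.connection` just above. Because `.pyi` stubs are never executed, the `|` spelling needs no runtime support and works regardless of the target Python version. A sketch of the before/after shape, reusing that alias:

    from typing import Union
    from typing_extensions import TypeAlias

    # Before: the union is wrapped in typing.Union.
    _Address: TypeAlias = Union[str, tuple[str, int]]

    # After: PEP 604 syntax, as now used throughout these stubs.
    _Address: TypeAlias = str | tuple[str, int]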
diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi index 1696714d187b..e035a1875650 100644 --- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -1,11 +1,11 @@ import queue import sys import threading -from _typeshed import Self, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Sequence from types import TracebackType from typing import Any, AnyStr, ClassVar, Generic, TypeVar, overload -from typing_extensions import SupportsIndex, TypeAlias +from typing_extensions import Self, SupportsIndex, TypeAlias from .connection import Connection from .context import BaseContext @@ -111,13 +111,13 @@ class BaseListProxy(BaseProxy, MutableSequence[_T]): # Use BaseListProxy[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] # to work around invariance @overload - def sort(self: BaseListProxy[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + def sort(self: BaseListProxy[SupportsRichComparisonT], *, key: None = None, reverse: bool = ...) -> None: ... @overload def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... class ListProxy(BaseListProxy[_T]): - def __iadd__(self: Self, __x: Iterable[_T]) -> Self: ... # type: ignore[override] - def __imul__(self: Self, __n: SupportsIndex) -> Self: ... # type: ignore[override] + def __iadd__(self, __x: Iterable[_T]) -> Self: ... # type: ignore[override] + def __imul__(self, __n: SupportsIndex) -> Self: ... # type: ignore[override] # Returned by BaseManager.get_server() class Server: @@ -165,7 +165,7 @@ class BaseManager: method_to_typeid: Mapping[str, str] | None = None, create_method: bool = True, ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/pool.pyi b/mypy/typeshed/stdlib/multiprocessing/pool.pyi index 3e2d0c3cd51e..a19dd555e254 100644 --- a/mypy/typeshed/stdlib/multiprocessing/pool.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/pool.pyi @@ -1,9 +1,8 @@ import sys -from _typeshed import Self from collections.abc import Callable, Iterable, Iterator, Mapping from types import TracebackType from typing import Any, Generic, TypeVar -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -62,7 +61,7 @@ class IMapIterator(Iterator[_T]): else: def __init__(self, cache: dict[int, IMapIterator[Any]]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def next(self, timeout: float | None = None) -> _T: ... def __next__(self, timeout: float | None = None) -> _T: ... @@ -109,7 +108,7 @@ class Pool: def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... 
diff --git a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi index 841c947360e8..ae6e2a0ed19f 100644 --- a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Iterable from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -29,7 +29,7 @@ class ShareableList(Generic[_SLT]): def __init__(self, sequence: Iterable[_SLT], *, name: str | None = None) -> None: ... def __getitem__(self, position: int) -> _SLT: ... def __setitem__(self, position: int, value: _SLT) -> None: ... - def __reduce__(self: Self) -> tuple[Self, tuple[_SLT, ...]]: ... + def __reduce__(self) -> tuple[Self, tuple[_SLT, ...]]: ... def __len__(self) -> int: ... @property def format(self) -> str: ... diff --git a/mypy/typeshed/stdlib/nntplib.pyi b/mypy/typeshed/stdlib/nntplib.pyi index 02e743ea9d1e..f948c1430c90 100644 --- a/mypy/typeshed/stdlib/nntplib.pyi +++ b/mypy/typeshed/stdlib/nntplib.pyi @@ -2,11 +2,11 @@ import datetime import socket import ssl import sys -from _typeshed import Self, Unused +from _typeshed import Unused from builtins import list as _list # conflicts with a method named "list" from collections.abc import Iterable from typing import IO, Any, NamedTuple -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "NNTP", @@ -72,7 +72,7 @@ class NNTP: usenetrc: bool = False, timeout: float = ..., ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def getwelcome(self) -> str: ... def getcapabilities(self) -> dict[str, _list[str]]: ... diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index b1b9db9ae2a7..595b78789c6a 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -11,7 +11,6 @@ from _typeshed import ( OpenBinaryModeWriting, OpenTextMode, ReadableBuffer, - Self, StrOrBytesPath, StrPath, SupportsLenAndGetItem, @@ -26,7 +25,7 @@ from contextlib import AbstractContextManager from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper as _TextIOWrapper from subprocess import Popen from typing import IO, Any, AnyStr, BinaryIO, Generic, NoReturn, Protocol, TypeVar, overload, runtime_checkable -from typing_extensions import Final, Literal, TypeAlias, final +from typing_extensions import Final, Literal, Self, TypeAlias, final from . import path as _path @@ -245,9 +244,9 @@ class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): # overloading MutableMapping.update in stdlib/typing.pyi # The type: ignore is needed due to incompatible __or__/__ior__ signatures @overload # type: ignore[misc] - def __ior__(self: Self, other: Mapping[AnyStr, AnyStr]) -> Self: ... + def __ior__(self, other: Mapping[AnyStr, AnyStr]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... 
environ: _Environ[str] if sys.platform != "win32": @@ -530,8 +529,8 @@ def fdopen( mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, - errors: None = ..., - newline: None = ..., + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> FileIO: ... @@ -541,8 +540,8 @@ def fdopen( mode: OpenBinaryModeUpdating, buffering: Literal[-1, 1] = -1, encoding: None = None, - errors: None = ..., - newline: None = ..., + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BufferedRandom: ... @@ -552,8 +551,8 @@ def fdopen( mode: OpenBinaryModeWriting, buffering: Literal[-1, 1] = -1, encoding: None = None, - errors: None = ..., - newline: None = ..., + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BufferedWriter: ... @@ -563,8 +562,8 @@ def fdopen( mode: OpenBinaryModeReading, buffering: Literal[-1, 1] = -1, encoding: None = None, - errors: None = ..., - newline: None = ..., + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BufferedReader: ... @@ -574,8 +573,8 @@ def fdopen( mode: OpenBinaryMode, buffering: int = -1, encoding: None = None, - errors: None = ..., - newline: None = ..., + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BinaryIO: ... @@ -958,7 +957,7 @@ if sys.platform != "win32": class sched_param(structseq[int], tuple[int]): if sys.version_info >= (3, 10): __match_args__: Final = ("sched_priority",) - def __new__(cls: type[Self], sched_priority: int) -> Self: ... + def __new__(cls, sched_priority: int) -> Self: ... @property def sched_priority(self) -> int: ... @@ -1001,7 +1000,7 @@ if sys.version_info >= (3, 8): path: str | None def __init__(self, path: str | None, cookie: _T, remove_dll_directory: Callable[[_T], object]) -> None: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def add_dll_directory(path: str) -> _AddedDllDirectory: ... diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 5220a142fb13..114678ed574d 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -6,7 +6,6 @@ from _typeshed import ( OpenBinaryModeWriting, OpenTextMode, ReadableBuffer, - Self, StrOrBytesPath, StrPath, ) @@ -15,7 +14,7 @@ from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWra from os import PathLike, stat_result from types import TracebackType from typing import IO, Any, BinaryIO, overload -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -39,15 +38,15 @@ class PurePath(PathLike[str]): def suffixes(self) -> list[str]: ... @property def stem(self) -> str: ... - def __new__(cls: type[Self], *args: StrPath) -> Self: ... + def __new__(cls, *args: StrPath) -> Self: ... def __eq__(self, other: object) -> bool: ... def __fspath__(self) -> str: ... def __lt__(self, other: PurePath) -> bool: ... def __le__(self, other: PurePath) -> bool: ... def __gt__(self, other: PurePath) -> bool: ... def __ge__(self, other: PurePath) -> bool: ... - def __truediv__(self: Self, key: StrPath) -> Self: ... - def __rtruediv__(self: Self, key: StrPath) -> Self: ... + def __truediv__(self, key: StrPath) -> Self: ... + def __rtruediv__(self, key: StrPath) -> Self: ... 
def __bytes__(self) -> bytes: ... def as_posix(self) -> str: ... def as_uri(self) -> str: ... @@ -57,17 +56,17 @@ class PurePath(PathLike[str]): def is_relative_to(self, *other: StrPath) -> bool: ... def match(self, path_pattern: str) -> bool: ... - def relative_to(self: Self, *other: StrPath) -> Self: ... - def with_name(self: Self, name: str) -> Self: ... + def relative_to(self, *other: StrPath) -> Self: ... + def with_name(self, name: str) -> Self: ... if sys.version_info >= (3, 9): - def with_stem(self: Self, stem: str) -> Self: ... + def with_stem(self, stem: str) -> Self: ... - def with_suffix(self: Self, suffix: str) -> Self: ... - def joinpath(self: Self, *other: StrPath) -> Self: ... + def with_suffix(self, suffix: str) -> Self: ... + def joinpath(self, *other: StrPath) -> Self: ... @property - def parents(self: Self) -> Sequence[Self]: ... + def parents(self) -> Sequence[Self]: ... @property - def parent(self: Self) -> Self: ... + def parent(self) -> Self: ... if sys.version_info >= (3, 9) and sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... @@ -75,11 +74,11 @@ class PurePosixPath(PurePath): ... class PureWindowsPath(PurePath): ... class Path(PurePath): - def __new__(cls: type[Self], *args: StrPath, **kwargs: Any) -> Self: ... - def __enter__(self: Self) -> Self: ... + def __new__(cls, *args: StrPath, **kwargs: Any) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... @classmethod - def cwd(cls: type[Self]) -> Self: ... + def cwd(cls) -> Self: ... if sys.version_info >= (3, 10): def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... @@ -88,7 +87,7 @@ class Path(PurePath): def chmod(self, mode: int) -> None: ... def exists(self) -> bool: ... - def glob(self: Self, pattern: str) -> Generator[Self, None, None]: ... + def glob(self, pattern: str) -> Generator[Self, None, None]: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def is_symlink(self) -> bool: ... @@ -96,7 +95,7 @@ class Path(PurePath): def is_fifo(self) -> bool: ... def is_block_device(self) -> bool: ... def is_char_device(self) -> bool: ... - def iterdir(self: Self) -> Generator[Self, None, None]: ... + def iterdir(self) -> Generator[Self, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> stat_result: ... def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... @@ -163,16 +162,16 @@ class Path(PurePath): def is_mount(self) -> bool: ... if sys.version_info >= (3, 9): - def readlink(self: Self) -> Self: ... + def readlink(self) -> Self: ... if sys.version_info >= (3, 8): - def rename(self: Self, target: str | PurePath) -> Self: ... - def replace(self: Self, target: str | PurePath) -> Self: ... + def rename(self, target: str | PurePath) -> Self: ... + def replace(self, target: str | PurePath) -> Self: ... else: def rename(self, target: str | PurePath) -> None: ... def replace(self, target: str | PurePath) -> None: ... - def resolve(self: Self, strict: bool = False) -> Self: ... - def rglob(self: Self, pattern: str) -> Generator[Self, None, None]: ... + def resolve(self, strict: bool = False) -> Self: ... + def rglob(self, pattern: str) -> Generator[Self, None, None]: ... def rmdir(self) -> None: ... def symlink_to(self, target: str | Path, target_is_directory: bool = False) -> None: ... 
if sys.version_info >= (3, 10): @@ -185,9 +184,9 @@ class Path(PurePath): def unlink(self) -> None: ... @classmethod - def home(cls: type[Self]) -> Self: ... - def absolute(self: Self) -> Self: ... - def expanduser(self: Self) -> Self: ... + def home(cls) -> Self: ... + def absolute(self) -> Self: ... + def expanduser(self) -> Self: ... def read_bytes(self) -> bytes: ... def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... def samefile(self, other_path: StrPath) -> bool: ... @@ -202,7 +201,7 @@ class Path(PurePath): def link_to(self, target: StrOrBytesPath) -> None: ... if sys.version_info >= (3, 12): def walk( - self: Self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ... + self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ... ) -> Iterator[tuple[Self, list[str], list[str]]]: ... class PosixPath(Path, PurePosixPath): ... diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi index a2b6636d8665..e2871bb54fa0 100644 --- a/mypy/typeshed/stdlib/pdb.pyi +++ b/mypy/typeshed/stdlib/pdb.pyi @@ -1,13 +1,12 @@ import signal import sys -from _typeshed import Self from bdb import Bdb from cmd import Cmd from collections.abc import Callable, Iterable, Mapping, Sequence from inspect import _SourceObjectType from types import CodeType, FrameType, TracebackType from typing import IO, Any, ClassVar, TypeVar -from typing_extensions import ParamSpec +from typing_extensions import ParamSpec, Self __all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] @@ -173,4 +172,4 @@ def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ... def lasti2lineno(code: CodeType, lasti: int) -> int: ... class _rstr(str): - def __repr__(self: Self) -> Self: ... + def __repr__(self) -> Self: ... diff --git a/mypy/typeshed/stdlib/pickle.pyi b/mypy/typeshed/stdlib/pickle.pyi index dc098cae97b7..57c4cb03e484 100644 --- a/mypy/typeshed/stdlib/pickle.pyi +++ b/mypy/typeshed/stdlib/pickle.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping -from typing import Any, ClassVar, Protocol, SupportsBytes, Union +from typing import Any, ClassVar, Protocol, SupportsBytes from typing_extensions import SupportsIndex, TypeAlias, final __all__ = [ @@ -142,13 +142,13 @@ class PickleError(Exception): ... class PicklingError(PickleError): ... class UnpicklingError(PickleError): ... 
-_ReducedType: TypeAlias = Union[ - str, - tuple[Callable[..., Any], tuple[Any, ...]], - tuple[Callable[..., Any], tuple[Any, ...], Any], - tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None], - tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None, Iterator[Any] | None], -] +_ReducedType: TypeAlias = ( + str + | tuple[Callable[..., Any], tuple[Any, ...]] + | tuple[Callable[..., Any], tuple[Any, ...], Any] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None, Iterator[Any] | None] +) class Pickler: fast: bool diff --git a/mypy/typeshed/stdlib/plistlib.pyi b/mypy/typeshed/stdlib/plistlib.pyi index 54ce3dc61abc..5b76c935f76e 100644 --- a/mypy/typeshed/stdlib/plistlib.pyi +++ b/mypy/typeshed/stdlib/plistlib.pyi @@ -1,9 +1,10 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer from collections.abc import Mapping, MutableMapping from datetime import datetime from enum import Enum from typing import IO, Any +from typing_extensions import Self if sys.version_info >= (3, 9): __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] @@ -100,7 +101,7 @@ if sys.version_info >= (3, 8): data: int def __init__(self, data: int) -> None: ... def __index__(self) -> int: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[int]]: ... + def __reduce__(self) -> tuple[type[Self], tuple[int]]: ... def __eq__(self, other: object) -> bool: ... class InvalidFileException(ValueError): diff --git a/mypy/typeshed/stdlib/profile.pyi b/mypy/typeshed/stdlib/profile.pyi index 8d6e9b220587..6ae375004158 100644 --- a/mypy/typeshed/stdlib/profile.pyi +++ b/mypy/typeshed/stdlib/profile.pyi @@ -1,7 +1,7 @@ -from _typeshed import Self, StrOrBytesPath +from _typeshed import StrOrBytesPath from collections.abc import Callable from typing import Any, TypeVar -from typing_extensions import ParamSpec, TypeAlias +from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] @@ -25,7 +25,7 @@ class Profile: def dump_stats(self, file: StrOrBytesPath) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... - def run(self: Self, cmd: str) -> Self: ... - def runctx(self: Self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... def calibrate(self, m: int, verbose: int = 0) -> float: ... 
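A third recurring change replaces the placeholder default `= ...` with the actual runtime default wherever it is known (`None`, `0`, `-1`, `False`, and so on), as in the `os.fdopen` and `itertools.groupby` hunks earlier in this patch. A stub-style sketch with hypothetical function names (not from this patch) showing what the reader gains:

    # Before: the caller cannot tell what the default actually is.
    def read_old(size: int = ...) -> bytes: ...

    # After: the default mirrors the runtime value, so IDEs and other
    # tools that surface stub signatures can display it.
    def read_new(size: int = -1) -> bytes: ...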
diff --git a/mypy/typeshed/stdlib/pstats.pyi b/mypy/typeshed/stdlib/pstats.pyi index f4f331934565..5d25d1bb3641 100644 --- a/mypy/typeshed/stdlib/pstats.pyi +++ b/mypy/typeshed/stdlib/pstats.pyi @@ -1,11 +1,11 @@ import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import StrOrBytesPath from collections.abc import Iterable from cProfile import Profile as _cProfile from enum import Enum from profile import Profile from typing import IO, Any, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): __all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] @@ -47,7 +47,7 @@ _SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] class Stats: sort_arg_dict_default: _SortArgDict def __init__( - self: Self, + self, __arg: None | str | Profile | _cProfile = ..., *args: None | str | Profile | _cProfile | Self, stream: IO[Any] | None = None, @@ -55,24 +55,24 @@ class Stats: def init(self, arg: None | str | Profile | _cProfile) -> None: ... def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... def get_top_level_stats(self) -> None: ... - def add(self: Self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... + def add(self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... def dump_stats(self, filename: StrOrBytesPath) -> None: ... def get_sort_arg_defs(self) -> _SortArgDict: ... @overload - def sort_stats(self: Self, field: Literal[-1, 0, 1, 2]) -> Self: ... + def sort_stats(self, field: Literal[-1, 0, 1, 2]) -> Self: ... @overload - def sort_stats(self: Self, *field: str) -> Self: ... - def reverse_order(self: Self) -> Self: ... - def strip_dirs(self: Self) -> Self: ... + def sort_stats(self, *field: str) -> Self: ... + def reverse_order(self) -> Self: ... + def strip_dirs(self) -> Self: ... def calc_callees(self) -> None: ... def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... if sys.version_info >= (3, 9): def get_stats_profile(self) -> StatsProfile: ... def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... - def print_stats(self: Self, *amount: _Selector) -> Self: ... - def print_callees(self: Self, *amount: _Selector) -> Self: ... - def print_callers(self: Self, *amount: _Selector) -> Self: ... + def print_stats(self, *amount: _Selector) -> Self: ... + def print_callees(self, *amount: _Selector) -> Self: ... + def print_callers(self, *amount: _Selector) -> Self: ... def print_call_heading(self, name_size: int, column_title: str) -> None: ... def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = "->") -> None: ... def print_title(self) -> None: ... diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi index 9bcd8659fc8c..c6893d50c66a 100644 --- a/mypy/typeshed/stdlib/pydoc.pyi +++ b/mypy/typeshed/stdlib/pydoc.pyi @@ -6,16 +6,16 @@ from collections.abc import Callable, Container, Mapping, MutableMapping from reprlib import Repr from types import MethodType, ModuleType, TracebackType from typing import IO, Any, AnyStr, NoReturn, TypeVar -from typing_extensions import TypeGuard +from typing_extensions import Final, TypeGuard __all__ = ["help"] _T = TypeVar("_T") -__author__: str -__date__: str -__version__: str -__credits__: str +__author__: Final[str] +__date__: Final[str] +__version__: Final[str] +__credits__: Final[str] def pathdirs() -> list[str]: ... 
def getdoc(object: object) -> str: ... diff --git a/mypy/typeshed/stdlib/quopri.pyi b/mypy/typeshed/stdlib/quopri.pyi index 336f733f64c0..b652e139bd0e 100644 --- a/mypy/typeshed/stdlib/quopri.pyi +++ b/mypy/typeshed/stdlib/quopri.pyi @@ -5,7 +5,7 @@ __all__ = ["encode", "decode", "encodestring", "decodestring"] class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... -def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: int = False) -> None: ... -def encodestring(s: ReadableBuffer, quotetabs: int = False, header: int = False) -> bytes: ... -def decode(input: _Input, output: SupportsWrite[bytes], header: int = False) -> None: ... -def decodestring(s: str | ReadableBuffer, header: int = False) -> bytes: ... +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: ... +def encodestring(s: ReadableBuffer, quotetabs: bool = False, header: bool = False) -> bytes: ... +def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: ... +def decodestring(s: str | ReadableBuffer, header: bool = False) -> bytes: ... diff --git a/mypy/typeshed/stdlib/re.pyi b/mypy/typeshed/stdlib/re.pyi index f45ac7383e5d..4e53141ade84 100644 --- a/mypy/typeshed/stdlib/re.pyi +++ b/mypy/typeshed/stdlib/re.pyi @@ -72,7 +72,7 @@ class Match(Generic[AnyStr]): def expand(self, template: AnyStr) -> AnyStr: ... # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload - def group(self, __group: Literal[0] = ...) -> AnyStr: ... + def group(self, __group: Literal[0] = 0) -> AnyStr: ... @overload def group(self, __group: str | int) -> AnyStr | Any: ... @overload diff --git a/mypy/typeshed/stdlib/runpy.pyi b/mypy/typeshed/stdlib/runpy.pyi index 7efc194c8c66..d4406ea4ac41 100644 --- a/mypy/typeshed/stdlib/runpy.pyi +++ b/mypy/typeshed/stdlib/runpy.pyi @@ -1,6 +1,7 @@ -from _typeshed import Self, Unused +from _typeshed import Unused from types import ModuleType from typing import Any +from typing_extensions import Self __all__ = ["run_module", "run_path"] @@ -8,7 +9,7 @@ class _TempModule: mod_name: str module: ModuleType def __init__(self, mod_name: str) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... class _ModifiedArgv0: diff --git a/mypy/typeshed/stdlib/select.pyi b/mypy/typeshed/stdlib/select.pyi index d02651320cf6..412fd71ee38d 100644 --- a/mypy/typeshed/stdlib/select.pyi +++ b/mypy/typeshed/stdlib/select.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import FileDescriptorLike, Self +from _typeshed import FileDescriptorLike from collections.abc import Iterable from types import TracebackType from typing import Any -from typing_extensions import final +from typing_extensions import Self, final if sys.platform != "win32": PIPE_BUF: int @@ -106,7 +106,7 @@ if sys.platform == "linux": @final class epoll: def __init__(self, sizehint: int = ..., flags: int = ...) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... 
def __exit__( self, __exc_type: type[BaseException] | None = None, diff --git a/mypy/typeshed/stdlib/selectors.pyi b/mypy/typeshed/stdlib/selectors.pyi index e15780fadee1..90a923f09355 100644 --- a/mypy/typeshed/stdlib/selectors.pyi +++ b/mypy/typeshed/stdlib/selectors.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import FileDescriptor, FileDescriptorLike, Self, Unused +from _typeshed import FileDescriptor, FileDescriptorLike, Unused from abc import ABCMeta, abstractmethod from collections.abc import Mapping from typing import Any, NamedTuple -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias _EventMask: TypeAlias = int @@ -28,7 +28,7 @@ class BaseSelector(metaclass=ABCMeta): def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... @abstractmethod def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... class SelectSelector(BaseSelector): diff --git a/mypy/typeshed/stdlib/shelve.pyi b/mypy/typeshed/stdlib/shelve.pyi index d55e08bffa16..82d0b03f4049 100644 --- a/mypy/typeshed/stdlib/shelve.pyi +++ b/mypy/typeshed/stdlib/shelve.pyi @@ -1,8 +1,8 @@ -from _typeshed import Self from collections.abc import Iterator, MutableMapping from dbm import _TFlags from types import TracebackType from typing import Any, TypeVar, overload +from typing_extensions import Self __all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"] @@ -23,7 +23,7 @@ class Shelf(MutableMapping[str, _VT]): def __setitem__(self, key: str, value: _VT) -> None: ... def __delitem__(self, key: str) -> None: ... def __contains__(self, key: str) -> bool: ... # type: ignore[override] - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/shlex.pyi b/mypy/typeshed/stdlib/shlex.pyi index 9a578d186be8..fa04932db676 100644 --- a/mypy/typeshed/stdlib/shlex.pyi +++ b/mypy/typeshed/stdlib/shlex.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Iterable from typing import TextIO +from typing_extensions import Self if sys.version_info >= (3, 8): __all__ = ["shlex", "split", "quote", "join"] @@ -46,5 +46,5 @@ class shlex(Iterable[str]): def push_source(self, newstream: str | TextIO, newfile: str | None = None) -> None: ... def pop_source(self) -> None: ... def error_leader(self, infile: str | None = None, lineno: int | None = None) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> str: ... 
diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi index e0d7364c6b4e..e411d47016b6 100644 --- a/mypy/typeshed/stdlib/signal.pyi +++ b/mypy/typeshed/stdlib/signal.pyi @@ -3,7 +3,7 @@ from _typeshed import structseq from collections.abc import Callable, Iterable from enum import IntEnum from types import FrameType -from typing import Any, Union +from typing import Any from typing_extensions import Final, Never, TypeAlias, final NSIG: int @@ -62,7 +62,7 @@ SIG_DFL: Handlers SIG_IGN: Handlers _SIGNUM: TypeAlias = int | Signals -_HANDLER: TypeAlias = Union[Callable[[int, FrameType | None], Any], int, Handlers, None] +_HANDLER: TypeAlias = Callable[[int, FrameType | None], Any] | int | Handlers | None def default_int_handler(__signalnum: int, __frame: FrameType | None) -> Never: ... @@ -113,7 +113,7 @@ else: SIGXCPU: Signals SIGXFSZ: Signals - class ItimerError(IOError): ... + class ItimerError(OSError): ... ITIMER_PROF: int ITIMER_REAL: int ITIMER_VIRTUAL: int @@ -178,4 +178,4 @@ def set_wakeup_fd(fd: int, *, warn_on_full_buffer: bool = ...) -> int: ... if sys.version_info >= (3, 9): if sys.platform == "linux": - def pidfd_send_signal(__pidfd: int, __sig: int, __siginfo: None = ..., __flags: int = ...) -> None: ... + def pidfd_send_signal(__pidfd: int, __sig: int, __siginfo: None = None, __flags: int = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index d0d674242bf8..0d7595fc1d6d 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -1,6 +1,6 @@ import sys from _socket import _Address as _SourceAddress -from _typeshed import ReadableBuffer, Self, _BufferWithLen +from _typeshed import ReadableBuffer, _BufferWithLen from collections.abc import Sequence from email.message import Message as _Message from re import Pattern @@ -8,7 +8,7 @@ from socket import socket from ssl import SSLContext from types import TracebackType from typing import Any, Protocol, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = [ "SMTPException", @@ -68,7 +68,7 @@ def quotedata(data: str) -> str: ... class _AuthObject(Protocol): @overload - def __call__(self, challenge: None = ...) -> str | None: ... + def __call__(self, challenge: None = None) -> str | None: ... @overload def __call__(self, challenge: bytes) -> str: ... @@ -95,7 +95,7 @@ class SMTP: timeout: float = ..., source_address: _SourceAddress | None = None, ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... 
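The `class ItimerError(IOError)` to `class ItimerError(OSError)` change in the signal.pyi hunk above is purely cosmetic: since Python 3.3, `IOError` has been an alias of `OSError`, so the stub now simply names the canonical class. A small runtime check of that fact (illustration only, not part of the patch):

    # IOError and OSError are the same class object on Python 3.3+.
    assert IOError is OSError
    print(issubclass(IOError, OSError))  # True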
diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index 4481f398867c..dbc1d46ec1d4 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -112,12 +112,12 @@ from _socket import ( setdefaulttimeout as setdefaulttimeout, timeout as timeout, ) -from _typeshed import ReadableBuffer, Self, Unused, WriteableBuffer +from _typeshed import ReadableBuffer, Unused, WriteableBuffer from collections.abc import Iterable from enum import IntEnum, IntFlag from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper from typing import Any, Protocol, overload -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.platform != "darwin" or sys.version_info >= (3, 9): from _socket import ( @@ -657,9 +657,9 @@ class socket(_socket.socket): def __init__( self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def dup(self: Self) -> Self: ... # noqa: F811 + def dup(self) -> Self: ... # noqa: F811 def accept(self) -> tuple[socket, _RetAddress]: ... # Note that the makefile's documented windows-specific behavior is not represented # mode strings with duplicates are intentionally excluded diff --git a/mypy/typeshed/stdlib/socketserver.pyi b/mypy/typeshed/stdlib/socketserver.pyi index b35f1553fb44..3f0bb0eea0ce 100644 --- a/mypy/typeshed/stdlib/socketserver.pyi +++ b/mypy/typeshed/stdlib/socketserver.pyi @@ -1,11 +1,11 @@ import sys import types from _socket import _Address, _RetAddress -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer from collections.abc import Callable from socket import socket as _socket -from typing import Any, BinaryIO, ClassVar, Union -from typing_extensions import TypeAlias +from typing import Any, BinaryIO, ClassVar +from typing_extensions import Self, TypeAlias __all__ = [ "BaseServer", @@ -29,7 +29,7 @@ if sys.platform != "win32": "UnixStreamServer", ] -_RequestType: TypeAlias = Union[_socket, tuple[bytes, _socket]] +_RequestType: TypeAlias = _socket | tuple[bytes, _socket] _AfUnixAddress: TypeAlias = str | ReadableBuffer # adddress acceptable for an AF_UNIX socket _AfInetAddress: TypeAlias = tuple[str | bytes | bytearray, int] # address acceptable for an AF_INET socket @@ -43,13 +43,13 @@ class BaseServer: socket_type: int timeout: float | None def __init__( - self: Self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] + self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] ) -> None: ... # It is not actually a `@property`, but we need a `Self` type: @property - def RequestHandlerClass(self: Self) -> Callable[[Any, _RetAddress, Self], BaseRequestHandler]: ... + def RequestHandlerClass(self) -> Callable[[Any, _RetAddress, Self], BaseRequestHandler]: ... @RequestHandlerClass.setter - def RequestHandlerClass(self: Self, val: Callable[[Any, _RetAddress, Self], BaseRequestHandler]) -> None: ... + def RequestHandlerClass(self, val: Callable[[Any, _RetAddress, Self], BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = 0.5) -> None: ... @@ -63,7 +63,7 @@ class BaseServer: def server_activate(self) -> None: ... def server_bind(self) -> None: ... 
def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -76,7 +76,7 @@ class TCPServer(BaseServer): allow_reuse_port: bool server_address: _AfInetAddress # type: ignore[assignment] def __init__( - self: Self, + self, server_address: _AfInetAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, @@ -91,7 +91,7 @@ if sys.platform != "win32": class UnixStreamServer(BaseServer): server_address: _AfUnixAddress # type: ignore[assignment] def __init__( - self: Self, + self, server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, @@ -100,7 +100,7 @@ if sys.platform != "win32": class UnixDatagramServer(BaseServer): server_address: _AfUnixAddress # type: ignore[assignment] def __init__( - self: Self, + self, server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index 01274d6e2a60..26188445547e 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -1,11 +1,11 @@ import sqlite3 import sys -from _typeshed import Incomplete, ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem, Unused +from _typeshed import Incomplete, ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from datetime import date, datetime, time from types import TracebackType from typing import Any, Protocol, TypeVar, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias, final +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, final _T = TypeVar("_T") _CursorT = TypeVar("_CursorT", bound=Cursor) @@ -324,7 +324,7 @@ class Connection: def create_function(self, name: str, num_params: int, func: Callable[..., _SqliteData] | None) -> None: ... @overload - def cursor(self, cursorClass: None = ...) -> Cursor: ... + def cursor(self, cursorClass: None = None) -> Cursor: ... @overload def cursor(self, cursorClass: Callable[[], _CursorT]) -> _CursorT: ... def execute(self, sql: str, parameters: _Parameters = ...) -> Cursor: ... @@ -358,7 +358,7 @@ class Connection: def deserialize(self, __data: ReadableBuffer, *, name: str = "main") -> None: ... def __call__(self, __sql: str) -> _Statement: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, __type: type[BaseException] | None, __value: BaseException | None, __traceback: TracebackType | None ) -> Literal[False]: ... @@ -377,8 +377,8 @@ class Cursor(Iterator[Any]): def rowcount(self) -> int: ... def __init__(self, __cursor: Connection) -> None: ... def close(self) -> None: ... - def execute(self: Self, __sql: str, __parameters: _Parameters = ...) -> Self: ... - def executemany(self: Self, __sql: str, __seq_of_parameters: Iterable[_Parameters]) -> Self: ... + def execute(self, __sql: str, __parameters: _Parameters = ...) -> Self: ... + def executemany(self, __sql: str, __seq_of_parameters: Iterable[_Parameters]) -> Self: ... def executescript(self, __sql_script: str) -> Cursor: ... 
def fetchall(self) -> list[Any]: ... def fetchmany(self, size: int | None = 1) -> list[Any]: ... @@ -387,7 +387,7 @@ class Cursor(Iterator[Any]): def fetchone(self) -> Any: ... def setinputsizes(self, __sizes: Unused) -> None: ... # does nothing def setoutputsize(self, __size: Unused, __column: Unused = None) -> None: ... # does nothing - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> Any: ... class DataError(DatabaseError): ... @@ -452,7 +452,7 @@ if sys.version_info >= (3, 11): # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END def seek(self, __offset: int, __origin: int = 0) -> None: ... def __len__(self) -> int: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, __typ: object, __val: object, __tb: object) -> Literal[False]: ... def __getitem__(self, __item: SupportsIndex | slice) -> int: ... def __setitem__(self, __item: SupportsIndex | slice, __value: int) -> None: ... diff --git a/mypy/typeshed/stdlib/sre_constants.pyi b/mypy/typeshed/stdlib/sre_constants.pyi index fe25eaf9728e..d522372c438c 100644 --- a/mypy/typeshed/stdlib/sre_constants.pyi +++ b/mypy/typeshed/stdlib/sre_constants.pyi @@ -1,6 +1,6 @@ import sys -from _typeshed import Self from typing import Any +from typing_extensions import Self MAXGROUPS: int @@ -16,7 +16,7 @@ class error(Exception): class _NamedIntConstant(int): name: Any - def __new__(cls: type[Self], value: int, name: str) -> Self: ... + def __new__(cls, value: int, name: str) -> Self: ... MAXREPEAT: _NamedIntConstant OPCODES: list[_NamedIntConstant] diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index f8b97fb60eb7..bbf8a4c6d65a 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -1,17 +1,17 @@ import enum import socket import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Callable, Iterable -from typing import Any, NamedTuple, Union, overload -from typing_extensions import Literal, TypeAlias, TypedDict, final +from typing import Any, NamedTuple, overload +from typing_extensions import Literal, Self, TypeAlias, TypedDict, final _PCTRTT: TypeAlias = tuple[tuple[str, str], ...] _PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] _PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] _PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] -_PasswordType: TypeAlias = Union[Callable[[], str | bytes | bytearray], str, bytes, bytearray] +_PasswordType: TypeAlias = Callable[[], str | bytes | bytearray] | str | bytes | bytearray _SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None] @@ -297,9 +297,9 @@ class _ASN1Object(NamedTuple): longname: str oid: str @classmethod - def fromnid(cls: type[Self], nid: int) -> Self: ... + def fromnid(cls, nid: int) -> Self: ... @classmethod - def fromname(cls: type[Self], name: str) -> Self: ... + def fromname(cls, name: str) -> Self: ... class Purpose(_ASN1Object, enum.Enum): SERVER_AUTH: _ASN1Object @@ -383,9 +383,9 @@ class SSLContext: if sys.version_info >= (3, 10): # Using the default (None) for the `protocol` parameter is deprecated, # but there isn't a good way of marking that in the stub unless/until PEP 702 is accepted - def __new__(cls: type[Self], protocol: int | None = None, *args: Any, **kwargs: Any) -> Self: ... 
+ def __new__(cls, protocol: int | None = None, *args: Any, **kwargs: Any) -> Self: ... else: - def __new__(cls: type[Self], protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... + def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... def cert_store_stats(self) -> dict[str, int]: ... def load_cert_chain( diff --git a/mypy/typeshed/stdlib/statistics.pyi b/mypy/typeshed/stdlib/statistics.pyi index 4ef950b9b4de..1358b1f90d7d 100644 --- a/mypy/typeshed/stdlib/statistics.pyi +++ b/mypy/typeshed/stdlib/statistics.pyi @@ -1,10 +1,10 @@ import sys -from _typeshed import Self, SupportsRichComparisonT +from _typeshed import SupportsRichComparisonT from collections.abc import Hashable, Iterable, Sequence from decimal import Decimal from fractions import Fraction from typing import Any, NamedTuple, SupportsFloat, TypeVar -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "StatisticsError", @@ -93,7 +93,7 @@ if sys.version_info >= (3, 8): @property def variance(self) -> float: ... @classmethod - def from_samples(cls: type[Self], data: Iterable[SupportsFloat]) -> Self: ... + def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: ... def samples(self, n: int, *, seed: Any | None = None) -> list[float]: ... def pdf(self, x: float) -> float: ... def cdf(self, x: float) -> float: ... diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index 35a7b7e34f6b..3940fad7b915 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable, Collection, Iterable, Mapping, Sequence from types import TracebackType from typing import IO, Any, AnyStr, Generic, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -257,8 +257,8 @@ if sys.version_info >= (3, 11): *, capture_output: bool = False, check: bool = False, - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, input: ReadableBuffer | None = None, text: Literal[None, False] = ..., timeout: float | None = None, @@ -461,8 +461,8 @@ elif sys.version_info >= (3, 10): *, capture_output: bool = False, check: bool = False, - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, input: ReadableBuffer | None = None, text: Literal[None, False] = ..., timeout: float | None = None, @@ -659,8 +659,8 @@ elif sys.version_info >= (3, 9): *, capture_output: bool = False, check: bool = False, - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, input: ReadableBuffer | None = None, text: Literal[None, False] = ..., timeout: float | None = None, @@ -838,8 +838,8 @@ else: *, capture_output: bool = False, check: bool = False, - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, input: ReadableBuffer | None = None, text: Literal[None, False] = ..., timeout: float | None = None, @@ -994,7 +994,7 @@ if sys.version_info >= (3, 11): def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1025,7 +1025,7 @@ elif sys.version_info >= 
(3, 10): def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1055,7 +1055,7 @@ elif sys.version_info >= (3, 9): def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1083,7 +1083,7 @@ else: def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1251,8 +1251,8 @@ if sys.version_info >= (3, 11): *, timeout: float | None = None, input: _InputString | None = ..., - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -1437,8 +1437,8 @@ elif sys.version_info >= (3, 10): *, timeout: float | None = None, input: _InputString | None = ..., - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -1617,8 +1617,8 @@ elif sys.version_info >= (3, 9): *, timeout: float | None = None, input: _InputString | None = ..., - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -1778,8 +1778,8 @@ else: *, timeout: float | None = None, input: _InputString | None = ..., - encoding: None = ..., - errors: None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., ) -> bytes: ... @overload @@ -2560,7 +2560,7 @@ class Popen(Generic[AnyStr]): def send_signal(self, sig: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/sunau.pyi b/mypy/typeshed/stdlib/sunau.pyi index 7702443b0c1c..6109b368c01a 100644 --- a/mypy/typeshed/stdlib/sunau.pyi +++ b/mypy/typeshed/stdlib/sunau.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import Self, Unused +from _typeshed import Unused from typing import IO, Any, NamedTuple, NoReturn, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias _File: TypeAlias = str | IO[bytes] @@ -32,7 +32,7 @@ class _sunau_params(NamedTuple): class Au_read: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def getfp(self) -> IO[bytes] | None: ... def rewind(self) -> None: ... @@ -52,7 +52,7 @@ class Au_read: class Au_write: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... 
diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi index 725f66794cf6..e12881599b4a 100644 --- a/mypy/typeshed/stdlib/sys.pyi +++ b/mypy/typeshed/stdlib/sys.pyi @@ -7,7 +7,7 @@ from importlib.machinery import ModuleSpec from io import TextIOWrapper from types import FrameType, ModuleType, TracebackType from typing import Any, NoReturn, Protocol, TextIO, TypeVar, overload -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Final, Literal, TypeAlias, final _T = TypeVar("_T") @@ -62,9 +62,10 @@ stdout: TextIO stderr: TextIO if sys.version_info >= (3, 10): stdlib_module_names: frozenset[str] -__stdin__: TextIOWrapper -__stdout__: TextIOWrapper -__stderr__: TextIOWrapper + +__stdin__: Final[TextIOWrapper] # Contains the original value of stdin +__stdout__: Final[TextIOWrapper] # Contains the original value of stdout +__stderr__: Final[TextIOWrapper] # Contains the original value of stderr tracebacklimit: int version: str api_version: int @@ -277,11 +278,10 @@ if sys.platform == "win32": def intern(__string: str) -> str: ... def is_finalizing() -> bool: ... - -__breakpointhook__: Any # contains the original value of breakpointhook - def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... +__breakpointhook__ = breakpointhook # Contains the original value of breakpointhook + if sys.platform != "win32": def setdlopenflags(__flags: int) -> None: ... diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index 0aca7956a580..5cf1d55cac63 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -1,13 +1,13 @@ import bz2 import io import sys -from _typeshed import Self, StrOrBytesPath, StrPath -from builtins import list as _list, type as Type # aliases to avoid name clashes with fields named "type" or "list" +from _typeshed import StrOrBytesPath, StrPath +from builtins import list as _list # aliases to avoid name clashes with fields named "type" or "list" from collections.abc import Callable, Iterable, Iterator, Mapping from gzip import _ReadableFileobj as _GzipReadableFileobj, _WritableFileobj as _GzipWritableFileobj from types import TracebackType from typing import IO, ClassVar, Protocol, overload -from typing_extensions import Literal +from typing_extensions import Literal, Self __all__ = [ "TarFile", @@ -141,14 +141,14 @@ class TarFile: errorlevel: int | None = None, copybufsize: int | None = None, # undocumented ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __iter__(self) -> Iterator[TarInfo]: ... @classmethod def open( - cls: type[Self], + cls, name: StrOrBytesPath | None = None, mode: str = "r", fileobj: IO[bytes] | None = None, # depends on mode @@ -166,7 +166,7 @@ class TarFile: ) -> Self: ... 
@classmethod def taropen( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["r", "a", "w", "x"] = "r", fileobj: _Fileobj | None = None, @@ -184,7 +184,7 @@ class TarFile: @overload @classmethod def gzopen( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["r"] = "r", fileobj: _GzipReadableFileobj | None = None, @@ -202,7 +202,7 @@ class TarFile: @overload @classmethod def gzopen( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["w", "x"], fileobj: _GzipWritableFileobj | None = None, @@ -220,7 +220,7 @@ class TarFile: @overload @classmethod def bz2open( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["w", "x"], fileobj: _Bz2WritableFileobj | None = None, @@ -238,7 +238,7 @@ class TarFile: @overload @classmethod def bz2open( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["r"] = "r", fileobj: _Bz2ReadableFileobj | None = None, @@ -255,7 +255,7 @@ class TarFile: ) -> Self: ... @classmethod def xzopen( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["r", "w", "x"] = "r", fileobj: IO[bytes] | None = None, @@ -346,9 +346,9 @@ class TarInfo: pax_headers: Mapping[str, str] def __init__(self, name: str = "") -> None: ... @classmethod - def frombuf(cls: Type[Self], buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... + def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... @classmethod - def fromtarfile(cls: Type[Self], tarfile: TarFile) -> Self: ... + def fromtarfile(cls, tarfile: TarFile) -> Self: ... @property def linkpath(self) -> str: ... @linkpath.setter diff --git a/mypy/typeshed/stdlib/telnetlib.pyi b/mypy/typeshed/stdlib/telnetlib.pyi index bcf9ef3693b2..10f6e4930f75 100644 --- a/mypy/typeshed/stdlib/telnetlib.pyi +++ b/mypy/typeshed/stdlib/telnetlib.pyi @@ -1,9 +1,9 @@ import socket -from _typeshed import Self from collections.abc import Callable, Sequence from re import Match, Pattern from types import TracebackType from typing import Any +from typing_extensions import Self __all__ = ["Telnet"] @@ -115,7 +115,7 @@ class Telnet: def expect( self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = None ) -> tuple[int, Match[bytes] | None, bytes]: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi index 9dc23be2557f..dbff6d632d02 100644 --- a/mypy/typeshed/stdlib/tempfile.pyi +++ b/mypy/typeshed/stdlib/tempfile.pyi @@ -1,10 +1,10 @@ import io import sys -from _typeshed import BytesPath, GenericPath, Self, StrPath, WriteableBuffer +from _typeshed import BytesPath, GenericPath, StrPath, WriteableBuffer from collections.abc import Iterable, Iterator from types import TracebackType from typing import IO, Any, AnyStr, Generic, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -186,7 +186,7 @@ class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): name: str delete: bool def __init__(self, file: IO[AnyStr], name: str, delete: bool = True) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... 
def __getattr__(self, name: str) -> Any: ... def close(self) -> None: ... @@ -369,7 +369,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): ) -> None: ... def rollover(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... # These methods are copied from the abstract methods of IO, because # SpooledTemporaryFile implements IO. diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi index fdacf0097008..1d30e4b73c23 100644 --- a/mypy/typeshed/stdlib/tkinter/__init__.pyi +++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi @@ -6,7 +6,7 @@ from enum import Enum from tkinter.constants import * from tkinter.font import _FontDescription from types import TracebackType -from typing import Any, Generic, NamedTuple, Protocol, TypeVar, Union, overload +from typing import Any, Generic, NamedTuple, Protocol, TypeVar, overload from typing_extensions import Literal, TypeAlias, TypedDict if sys.version_info >= (3, 9): @@ -179,7 +179,7 @@ _CanvasItemId: TypeAlias = int _Color: TypeAlias = str # typically '#rrggbb', '#rgb' or color names. _Compound: TypeAlias = Literal["top", "left", "center", "right", "bottom", "none"] # -compound in manual page named 'options' # manual page: Tk_GetCursor -_Cursor: TypeAlias = Union[str, tuple[str], tuple[str, str], tuple[str, str, str], tuple[str, str, str, str]] +_Cursor: TypeAlias = str | tuple[str] | tuple[str, str] | tuple[str, str, str] | tuple[str, str, str, str] # example when it's sequence: entry['invalidcommand'] = [entry.register(print), '%P'] _EntryValidateCommand: TypeAlias = str | list[str] | tuple[str, ...] | Callable[[], bool] _GridIndex: TypeAlias = int | str @@ -188,7 +188,7 @@ _Relief: TypeAlias = Literal["raised", "sunken", "flat", "ridge", "solid", "groo _ScreenUnits: TypeAlias = str | float # Often the right type instead of int. Manual page: Tk_GetPixels # -xscrollcommand and -yscrollcommand in 'options' manual page _XYScrollCommand: TypeAlias = str | Callable[[float, float], object] -_TakeFocusValue: TypeAlias = Union[int, Literal[""], Callable[[str], bool | None]] # -takefocus in manual page named 'options' +_TakeFocusValue: TypeAlias = int | Literal[""] | Callable[[str], bool | None] # -takefocus in manual page named 'options' if sys.version_info >= (3, 11): class _VersionInfoType(NamedTuple): @@ -421,7 +421,7 @@ class Misc: def winfo_viewable(self) -> bool: ... def winfo_visual(self) -> str: ... def winfo_visualid(self) -> str: ... - def winfo_visualsavailable(self, includeids: int = False) -> list[tuple[str, int]]: ... + def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: ... def winfo_vrootheight(self) -> int: ... def winfo_vrootwidth(self) -> int: ... def winfo_vrootx(self) -> int: ... diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi index bd477535f41f..61ebc0e2734f 100644 --- a/mypy/typeshed/stdlib/tkinter/ttk.pyi +++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -4,7 +4,7 @@ import tkinter from _typeshed import Incomplete from collections.abc import Callable from tkinter.font import _FontDescription -from typing import Any, Union, overload +from typing import Any, overload from typing_extensions import Literal, TypeAlias, TypedDict __all__ = [ @@ -38,13 +38,13 @@ __all__ = [ def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... 
def setup_master(master: Incomplete | None = None): ... -_Padding: TypeAlias = Union[ - tkinter._ScreenUnits, - tuple[tkinter._ScreenUnits], - tuple[tkinter._ScreenUnits, tkinter._ScreenUnits], - tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits], - tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits], -] +_Padding: TypeAlias = ( + tkinter._ScreenUnits + | tuple[tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits] +) # from ttk_widget (aka ttk::widget) manual page, differs from tkinter._Compound _TtkCompound: TypeAlias = Literal["text", "image", tkinter._Compound] diff --git a/mypy/typeshed/stdlib/traceback.pyi b/mypy/typeshed/stdlib/traceback.pyi index cdda50c0a1b3..4483a8c2a1b0 100644 --- a/mypy/typeshed/stdlib/traceback.pyi +++ b/mypy/typeshed/stdlib/traceback.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import Self, SupportsWrite +from _typeshed import SupportsWrite from collections.abc import Generator, Iterable, Iterator, Mapping from types import FrameType, TracebackType from typing import Any, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "extract_stack", @@ -129,7 +129,7 @@ class TracebackException: ) -> None: ... @classmethod def from_exception( - cls: type[Self], + cls, exc: BaseException, *, limit: int | None = ..., @@ -154,7 +154,7 @@ class TracebackException: ) -> None: ... @classmethod def from_exception( - cls: type[Self], + cls, exc: BaseException, *, limit: int | None = ..., @@ -176,7 +176,7 @@ class TracebackException: ) -> None: ... @classmethod def from_exception( - cls: type[Self], exc: BaseException, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ... + cls, exc: BaseException, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ... ) -> Self: ... def __eq__(self, other: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/tracemalloc.pyi b/mypy/typeshed/stdlib/tracemalloc.pyi index d7214de285f8..3dc8b8603fe5 100644 --- a/mypy/typeshed/stdlib/tracemalloc.pyi +++ b/mypy/typeshed/stdlib/tracemalloc.pyi @@ -1,7 +1,7 @@ import sys from _tracemalloc import * from collections.abc import Sequence -from typing import Any, Union, overload +from typing import Any, overload from typing_extensions import SupportsIndex, TypeAlias def get_object_traceback(obj: object) -> Traceback | None: ... @@ -67,7 +67,7 @@ class Frame: def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... 
if sys.version_info >= (3, 9): - _TraceTuple: TypeAlias = Union[tuple[int, int, Sequence[_FrameTuple], int | None], tuple[int, int, Sequence[_FrameTuple]]] + _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] else: _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple]] diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi index 1259ca6fb4cc..8017c8290fb9 100644 --- a/mypy/typeshed/stdlib/turtle.pyi +++ b/mypy/typeshed/stdlib/turtle.pyi @@ -1,8 +1,7 @@ -from _typeshed import Self from collections.abc import Callable, Sequence from tkinter import Canvas, Frame, Misc, PhotoImage, Scrollbar -from typing import Any, ClassVar, Union, overload -from typing_extensions import TypeAlias +from typing import Any, ClassVar, overload +from typing_extensions import Self, TypeAlias __all__ = [ "ScrolledCanvas", @@ -133,7 +132,7 @@ __all__ = [ # alias we use for return types. Really, these two aliases should be the # same, but as per the "no union returns" typeshed policy, we'll return # Any instead. -_Color: TypeAlias = Union[str, tuple[float, float, float]] +_Color: TypeAlias = str | tuple[float, float, float] _AnyColor: TypeAlias = Any # TODO: Replace this with a TypedDict once it becomes standardized. @@ -143,7 +142,7 @@ _Speed: TypeAlias = str | float _PolygonCoords: TypeAlias = Sequence[tuple[float, float]] class Vec2D(tuple[float, float]): - def __new__(cls: type[Self], x: float, y: float) -> Self: ... + def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] def __mul__(self, other: Vec2D) -> float: ... @@ -366,7 +365,7 @@ class RawTurtle(TPen, TNavigator): def setundobuffer(self, size: int | None) -> None: ... def undobufferentries(self) -> int: ... def clear(self) -> None: ... - def clone(self: Self) -> Self: ... + def clone(self) -> Self: ... @overload def shape(self, name: None = None) -> str: ... @overload @@ -411,7 +410,7 @@ class RawTurtle(TPen, TNavigator): def end_poly(self) -> None: ... def get_poly(self) -> _PolygonCoords | None: ... def getscreen(self) -> TurtleScreen: ... - def getturtle(self: Self) -> Self: ... + def getturtle(self) -> Self: ... getpen = getturtle def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 5fb24106685e..d529b3d9ad1a 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -363,7 +363,7 @@ class GeneratorType(Generator[_T_co, _T_contra, _V_co]): self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... ) -> _T_co: ... @overload - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... @final class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): @@ -379,7 +379,7 @@ class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... ) -> _T_co: ... @overload - async def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... 
+ async def athrow(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... def aclose(self) -> Coroutine[Any, Any, None]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -402,7 +402,7 @@ class CoroutineType(Coroutine[_T_co, _T_contra, _V_co]): self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... ) -> _T_co: ... @overload - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... class _StaticFunctionType: # Fictional type to correct the type of MethodType.__func__. diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index eaa566582fb4..d06b081d3ddc 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -1,6 +1,6 @@ -import _typeshed import collections # Needed by aliases like DefaultDict, see mypy issue 2986 import sys +import typing_extensions from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Incomplete, SupportsKeysAndGetItem from abc import ABCMeta, abstractmethod @@ -501,7 +501,7 @@ class MutableSequence(Sequence[_T], Generic[_T]): def reverse(self) -> None: ... def pop(self, index: int = -1) -> _T: ... def remove(self, value: _T) -> None: ... - def __iadd__(self: _typeshed.Self, values: Iterable[_T]) -> _typeshed.Self: ... + def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ... class AbstractSet(Collection[_T_co], Generic[_T_co]): @abstractmethod @@ -527,10 +527,10 @@ class MutableSet(AbstractSet[_T], Generic[_T]): def clear(self) -> None: ... def pop(self) -> _T: ... def remove(self, value: _T) -> None: ... - def __ior__(self: _typeshed.Self, it: AbstractSet[_T]) -> _typeshed.Self: ... # type: ignore[override,misc] - def __iand__(self: _typeshed.Self, it: AbstractSet[Any]) -> _typeshed.Self: ... - def __ixor__(self: _typeshed.Self, it: AbstractSet[_T]) -> _typeshed.Self: ... # type: ignore[override,misc] - def __isub__(self: _typeshed.Self, it: AbstractSet[Any]) -> _typeshed.Self: ... + def __ior__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __iand__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... + def __ixor__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __isub__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... class MappingView(Sized): def __init__(self, mapping: Mapping[Any, Any]) -> None: ... # undocumented @@ -719,7 +719,7 @@ class ByteString(Sequence[int], metaclass=ABCMeta): ... # Functions -_get_type_hints_obj_allowed_types = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed +_get_type_hints_obj_allowed_types: typing_extensions.TypeAlias = ( # noqa: Y042 object | Callable[..., Any] | FunctionType @@ -783,7 +783,7 @@ class NamedTuple(tuple[Any, ...]): @overload def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... @overload - def __init__(self, typename: str, fields: None = ..., **kwargs: Any) -> None: ... + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... @classmethod def _make(cls: Type[_T], iterable: Iterable[Any]) -> _T: ... 
if sys.version_info >= (3, 8): @@ -791,7 +791,7 @@ class NamedTuple(tuple[Any, ...]): else: def _asdict(self) -> collections.OrderedDict[str, Any]: ... - def _replace(self: _typeshed.Self, **kwargs: Any) -> _typeshed.Self: ... + def _replace(self, **kwargs: Any) -> typing_extensions.Self: ... # Internal mypy fallback type for all typed dicts (does not exist at runtime) # N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict @@ -801,7 +801,7 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): if sys.version_info >= (3, 9): __required_keys__: ClassVar[frozenset[str]] __optional_keys__: ClassVar[frozenset[str]] - def copy(self: _typeshed.Self) -> _typeshed.Self: ... + def copy(self) -> typing_extensions.Self: ... # Using Never so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: _Never, default: object) -> object: ... @@ -813,8 +813,8 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... if sys.version_info >= (3, 9): - def __or__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... - def __ior__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... + def __or__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... + def __ior__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... @_final class ForwardRef: diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index 73a41f16600d..bf3892d5709e 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -1,4 +1,3 @@ -import _typeshed import abc import collections import sys @@ -129,7 +128,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): __required_keys__: ClassVar[frozenset[str]] __optional_keys__: ClassVar[frozenset[str]] __total__: ClassVar[bool] - def copy(self: _typeshed.Self) -> _typeshed.Self: ... + def copy(self) -> Self: ... # Using Never so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: Never, default: object) -> object: ... @@ -141,8 +140,8 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def values(self) -> dict_values[str, object]: ... def __delitem__(self, k: Never) -> None: ... if sys.version_info >= (3, 9): - def __or__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... - def __ior__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... + def __or__(self, __value: Self) -> Self: ... + def __ior__(self, __value: Self) -> Self: ... # TypedDict is a (non-subscriptable) special form. TypedDict: object @@ -242,15 +241,15 @@ else: @overload def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... @overload - def __init__(self, typename: str, fields: None = ..., **kwargs: Any) -> None: ... + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... @classmethod - def _make(cls: type[_typeshed.Self], iterable: Iterable[Any]) -> _typeshed.Self: ... + def _make(cls, iterable: Iterable[Any]) -> Self: ... if sys.version_info >= (3, 8): def _asdict(self) -> dict[str, Any]: ... else: def _asdict(self) -> collections.OrderedDict[str, Any]: ... - def _replace(self: _typeshed.Self, **kwargs: Any) -> _typeshed.Self: ... + def _replace(self, **kwargs: Any) -> Self: ... 
# New things in 3.xx # The `default` parameter was added to TypeVar, ParamSpec, and TypeVarTuple (PEP 696) diff --git a/mypy/typeshed/stdlib/unicodedata.pyi b/mypy/typeshed/stdlib/unicodedata.pyi index 4569d6584fd6..5a1f7fe6638d 100644 --- a/mypy/typeshed/stdlib/unicodedata.pyi +++ b/mypy/typeshed/stdlib/unicodedata.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import ReadOnlyBuffer -from typing import Any, TypeVar +from typing import Any, TypeVar, overload from typing_extensions import Literal, TypeAlias, final ucd_3_2_0: UCD @@ -14,9 +14,15 @@ _T = TypeVar("_T") def bidirectional(__chr: str) -> str: ... def category(__chr: str) -> str: ... def combining(__chr: str) -> int: ... -def decimal(__chr: str, __default: _T = ...) -> int | _T: ... +@overload +def decimal(__chr: str) -> int: ... +@overload +def decimal(__chr: str, __default: _T) -> int | _T: ... def decomposition(__chr: str) -> str: ... -def digit(__chr: str, __default: _T = ...) -> int | _T: ... +@overload +def digit(__chr: str) -> int: ... +@overload +def digit(__chr: str, __default: _T) -> int | _T: ... _EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] @@ -27,26 +33,44 @@ if sys.version_info >= (3, 8): def lookup(__name: str | ReadOnlyBuffer) -> str: ... def mirrored(__chr: str) -> int: ... -def name(__chr: str, __default: _T = ...) -> str | _T: ... +@overload +def name(__chr: str) -> str: ... +@overload +def name(__chr: str, __default: _T) -> str | _T: ... def normalize(__form: str, __unistr: str) -> str: ... -def numeric(__chr: str, __default: _T = ...) -> float | _T: ... +@overload +def numeric(__chr: str) -> float: ... +@overload +def numeric(__chr: str, __default: _T) -> float | _T: ... @final class UCD: # The methods below are constructed from the same array in C - # (unicodedata_functions) and hence identical to the methods above. + # (unicodedata_functions) and hence identical to the functions above. unidata_version: str def bidirectional(self, __chr: str) -> str: ... def category(self, __chr: str) -> str: ... def combining(self, __chr: str) -> int: ... - def decimal(self, __chr: str, __default: _T = ...) -> int | _T: ... + @overload + def decimal(self, __chr: str) -> int: ... + @overload + def decimal(self, __chr: str, __default: _T) -> int | _T: ... def decomposition(self, __chr: str) -> str: ... - def digit(self, __chr: str, __default: _T = ...) -> int | _T: ... + @overload + def digit(self, __chr: str) -> int: ... + @overload + def digit(self, __chr: str, __default: _T) -> int | _T: ... def east_asian_width(self, __chr: str) -> _EastAsianWidth: ... if sys.version_info >= (3, 8): def is_normalized(self, __form: str, __unistr: str) -> bool: ... def lookup(self, __name: str | ReadOnlyBuffer) -> str: ... def mirrored(self, __chr: str) -> int: ... - def name(self, __chr: str, __default: _T = ...) -> str | _T: ... + @overload + def name(self, __chr: str) -> str: ... + @overload + def name(self, __chr: str, __default: _T) -> str | _T: ... def normalize(self, __form: str, __unistr: str) -> str: ... - def numeric(self, __chr: str, __default: _T = ...) -> float | _T: ... + @overload + def numeric(self, __chr: str) -> float: ... + @overload + def numeric(self, __chr: str, __default: _T) -> float | _T: ... 
diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 5b1bd9288659..8f8cf43385a8 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -1,26 +1,13 @@ import logging import sys import unittest.result -from _typeshed import Self, SupportsDunderGE, SupportsDunderGT, SupportsDunderLE, SupportsDunderLT, SupportsRSub, SupportsSub +from _typeshed import SupportsDunderGE, SupportsDunderGT, SupportsDunderLE, SupportsDunderLT, SupportsRSub, SupportsSub from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Set as AbstractSet from contextlib import AbstractContextManager from re import Pattern from types import TracebackType -from typing import ( - Any, - AnyStr, - ClassVar, - Generic, - NamedTuple, - NoReturn, - Protocol, - SupportsAbs, - SupportsRound, - TypeVar, - Union, - overload, -) -from typing_extensions import ParamSpec, TypeAlias +from typing import Any, AnyStr, ClassVar, Generic, NamedTuple, NoReturn, Protocol, SupportsAbs, SupportsRound, TypeVar, overload +from typing_extensions import ParamSpec, Self, TypeAlias from warnings import WarningMessage if sys.version_info >= (3, 9): @@ -82,9 +69,9 @@ class SkipTest(Exception): class _SupportsAbsAndDunderGE(SupportsDunderGE[Any], SupportsAbs[Any], Protocol): ... if sys.version_info >= (3, 10): - _IsInstanceClassInfo: TypeAlias = Union[type, UnionType, tuple[type | UnionType | tuple[Any, ...], ...]] + _IsInstanceClassInfo: TypeAlias = type | UnionType | tuple[type | UnionType | tuple[Any, ...], ...] else: - _IsInstanceClassInfo: TypeAlias = Union[type, tuple[type | tuple[Any, ...], ...]] + _IsInstanceClassInfo: TypeAlias = type | tuple[type | tuple[Any, ...], ...] class TestCase: failureException: type[BaseException] @@ -317,7 +304,7 @@ class FunctionTestCase(TestCase): class _AssertRaisesContext(Generic[_E]): exception: _E - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> bool: ... @@ -329,7 +316,7 @@ class _AssertWarnsContext: filename: str lineno: int warnings: list[WarningMessage] - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 54c79fd433d2..f0345c903a3b 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -1,10 +1,9 @@ import sys -from _typeshed import Self from collections.abc import Awaitable, Callable, Coroutine, Iterable, Mapping, Sequence from contextlib import _GeneratorContextManager from types import TracebackType from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Final, Literal, Self, TypeAlias _T = TypeVar("_T") _TT = TypeVar("_TT", bound=type[Any]) @@ -48,7 +47,7 @@ else: "seal", ) -__version__: str +__version__: Final[str] FILTER_DIR: Any @@ -68,12 +67,7 @@ _CallValue: TypeAlias = str | tuple[Any, ...] 
| Mapping[str, Any] | _ArgsKwargs class _Call(tuple[Any, ...]): def __new__( - cls: type[Self], - value: _CallValue = ..., - name: str | None = "", - parent: Any | None = None, - two: bool = False, - from_kall: bool = True, + cls, value: _CallValue = ..., name: str | None = "", parent: Any | None = None, two: bool = False, from_kall: bool = True ) -> Self: ... name: Any parent: Any @@ -107,8 +101,10 @@ class _CallList(list[_Call]): class Base: def __init__(self, *args: Any, **kwargs: Any) -> None: ... +# We subclass with "Any" because mocks are explicitly designed to stand in for other types, +# something that can't be expressed with our static type system. class NonCallableMock(Base, Any): - def __new__(__cls: type[Self], *args: Any, **kw: Any) -> Self: ... + def __new__(__cls, *args: Any, **kw: Any) -> Self: ... def __init__( self, spec: list[str] | object | type[object] | None = None, @@ -437,9 +433,9 @@ def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: ... class PropertyMock(Mock): if sys.version_info >= (3, 8): - def __get__(self: Self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... + def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... else: - def __get__(self: Self, obj: _T, obj_type: type[_T] | None) -> Self: ... + def __get__(self, obj: _T, obj_type: type[_T] | None) -> Self: ... def __set__(self, obj: Any, value: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/urllib/error.pyi b/mypy/typeshed/stdlib/urllib/error.pyi index 8ea25680f1a4..89cec9bf289c 100644 --- a/mypy/typeshed/stdlib/urllib/error.pyi +++ b/mypy/typeshed/stdlib/urllib/error.pyi @@ -4,13 +4,15 @@ from urllib.response import addinfourl __all__ = ["URLError", "HTTPError", "ContentTooShortError"] -class URLError(IOError): +class URLError(OSError): reason: str | BaseException def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... class HTTPError(URLError, addinfourl): @property - def headers(self) -> Message: ... # type: ignore[override] + def headers(self) -> Message: ... + @headers.setter + def headers(self, headers: Message) -> None: ... @property def reason(self) -> str: ... # type: ignore[override] code: int diff --git a/mypy/typeshed/stdlib/urllib/response.pyi b/mypy/typeshed/stdlib/urllib/response.pyi index 4db1b5649c7a..61ba687076b2 100644 --- a/mypy/typeshed/stdlib/urllib/response.pyi +++ b/mypy/typeshed/stdlib/urllib/response.pyi @@ -1,20 +1,21 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable from email.message import Message from types import TracebackType from typing import IO, Any, BinaryIO +from typing_extensions import Self __all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] class addbase(BinaryIO): fp: IO[bytes] def __init__(self, fp: IO[bytes]) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> bytes: ... def close(self) -> None: ... # These methods don't actually exist, but the class inherits at runtime from diff --git a/mypy/typeshed/stdlib/uu.pyi b/mypy/typeshed/stdlib/uu.pyi index 20e79bf3fec9..324053e04337 100644 --- a/mypy/typeshed/stdlib/uu.pyi +++ b/mypy/typeshed/stdlib/uu.pyi @@ -10,4 +10,4 @@ class Error(Exception): ... 
def encode( in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False ) -> None: ... -def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: int = False) -> None: ... +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: ... diff --git a/mypy/typeshed/stdlib/wave.pyi b/mypy/typeshed/stdlib/wave.pyi index 3817ae09307f..0d004d6b2d8a 100644 --- a/mypy/typeshed/stdlib/wave.pyi +++ b/mypy/typeshed/stdlib/wave.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import ReadableBuffer, Self, Unused +from _typeshed import ReadableBuffer, Unused from typing import IO, Any, BinaryIO, NamedTuple, NoReturn, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): __all__ = ["open", "Error", "Wave_read", "Wave_write"] @@ -24,7 +24,7 @@ class _wave_params(NamedTuple): class Wave_read: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def getfp(self) -> BinaryIO | None: ... def rewind(self) -> None: ... @@ -44,7 +44,7 @@ class Wave_read: class Wave_write: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index a0f35b4f51eb..1e0aac814dfb 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self, SupportsKeysAndGetItem +from _typeshed import SupportsKeysAndGetItem from _weakref import ( CallableProxyType as CallableProxyType, ProxyType as ProxyType, @@ -12,7 +12,7 @@ from _weakref import ( from _weakrefset import WeakSet as WeakSet from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping from typing import Any, Generic, TypeVar, overload -from typing_extensions import ParamSpec +from typing_extensions import ParamSpec, Self __all__ = [ "ref", @@ -41,7 +41,7 @@ _P = ParamSpec("_P") ProxyTypes: tuple[type[Any], ...] class WeakMethod(ref[_CallableT], Generic[_CallableT]): - def __new__(cls: type[Self], meth: _CallableT, callback: Callable[[_CallableT], object] | None = None) -> Self: ... + def __new__(cls, meth: _CallableT, callback: Callable[[_CallableT], object] | None = None) -> Self: ... def __call__(self) -> _CallableT | None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... @@ -63,7 +63,7 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def __iter__(self) -> Iterator[_KT]: ... def copy(self) -> WeakValueDictionary[_KT, _VT]: ... __copy__ = copy - def __deepcopy__(self: Self, memo: Any) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] @@ -80,14 +80,14 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... 
# WeakValueDictionary.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class KeyedRef(ref[_T], Generic[_KT, _T]): key: _KT # This __new__ method uses a non-standard name for the "cls" parameter - def __new__(type: type[Self], ob: _T, callback: Callable[[_T], Any], key: _KT) -> Self: ... + def __new__(type, ob: _T, callback: Callable[[_T], Any], key: _KT) -> Self: ... def __init__(self, ob: _T, callback: Callable[[_T], Any], key: _KT) -> None: ... class WeakKeyDictionary(MutableMapping[_KT, _VT]): @@ -103,7 +103,7 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def __iter__(self) -> Iterator[_KT]: ... def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... __copy__ = copy - def __deepcopy__(self: Self, memo: Any) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] @@ -123,9 +123,9 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... # WeakKeyDictionary.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class finalize: # TODO: This is a good candidate for to be a `Generic[_P, _T]` class def __init__(self, __obj: object, __func: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... diff --git a/mypy/typeshed/stdlib/webbrowser.pyi b/mypy/typeshed/stdlib/webbrowser.pyi index d15ae49fd1e8..02edd42e7d59 100644 --- a/mypy/typeshed/stdlib/webbrowser.pyi +++ b/mypy/typeshed/stdlib/webbrowser.pyi @@ -65,4 +65,9 @@ if sys.platform == "darwin": def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class MacOSXOSAScript(BaseBrowser): # In runtime this class does not have `name` and `basename` + if sys.version_info >= (3, 11): + def __init__(self, name: str = "default") -> None: ... + else: + def __init__(self, name: str) -> None: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/mypy/typeshed/stdlib/winreg.pyi b/mypy/typeshed/stdlib/winreg.pyi index 6377492babc7..5b2d09a3bebc 100644 --- a/mypy/typeshed/stdlib/winreg.pyi +++ b/mypy/typeshed/stdlib/winreg.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from types import TracebackType from typing import Any -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, final if sys.platform == "win32": _KeyType: TypeAlias = HKEYType | int @@ -93,7 +92,7 @@ if sys.platform == "win32": class HKEYType: def __bool__(self) -> bool: ... def __int__(self) -> int: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... diff --git a/mypy/typeshed/stdlib/xml/dom/minidom.pyi b/mypy/typeshed/stdlib/xml/dom/minidom.pyi index d996f66984f9..7bbffb88c8f7 100644 --- a/mypy/typeshed/stdlib/xml/dom/minidom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/minidom.pyi @@ -1,7 +1,7 @@ import sys import xml.dom -from _typeshed import Incomplete, ReadableBuffer, Self, SupportsRead, SupportsWrite -from typing_extensions import Literal +from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite +from typing_extensions import Literal, Self from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS from xml.sax.xmlreader import XMLReader @@ -46,7 +46,7 @@ class Node(xml.dom.Node): def setUserData(self, key, data, handler): ... childNodes: Incomplete def unlink(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, et, ev, tb) -> None: ... class DocumentFragment(Node): @@ -269,7 +269,7 @@ class DOMImplementation(DOMImplementationLS): def hasFeature(self, feature: str, version: str | None) -> bool: ... def createDocument(self, namespaceURI: str | None, qualifiedName: str | None, doctype: DocumentType | None) -> Document: ... def createDocumentType(self, qualifiedName: str | None, publicId: str, systemId: str) -> DocumentType: ... - def getInterface(self: Self, feature: str) -> Self | None: ... + def getInterface(self, feature: str) -> Self | None: ... class ElementInfo: tagName: Incomplete diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi index a591258db801..ca981a00d25f 100644 --- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer, SupportsRead, _T_co +from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co from collections.abc import Iterable from typing import Any, NoReturn, Protocol from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler @@ -29,12 +29,19 @@ default_parser_list: list[str] if sys.version_info >= (3, 8): def make_parser(parser_list: Iterable[str] = ...) -> XMLReader: ... + def parse( + source: StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str], + handler: ContentHandler, + errorHandler: ErrorHandler = ..., + ) -> None: ... else: def make_parser(parser_list: list[str] = ...) -> XMLReader: ... + def parse( + source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], + handler: ContentHandler, + errorHandler: ErrorHandler = ..., + ) -> None: ... -def parse( - source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], handler: ContentHandler, errorHandler: ErrorHandler = ... -) -> None: ... def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... 
diff --git a/mypy/typeshed/stdlib/xmlrpc/client.pyi b/mypy/typeshed/stdlib/xmlrpc/client.pyi index 536cd6382d0b..7bf701ae716d 100644 --- a/mypy/typeshed/stdlib/xmlrpc/client.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/client.pyi @@ -2,13 +2,13 @@ import gzip import http.client import sys import time -from _typeshed import ReadableBuffer, Self, SupportsRead, SupportsWrite, _BufferWithLen +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite, _BufferWithLen from collections.abc import Callable, Iterable, Mapping from datetime import datetime from io import BytesIO from types import TracebackType -from typing import Any, Protocol, Union, overload -from typing_extensions import Literal, TypeAlias +from typing import Any, Protocol, overload +from typing_extensions import Literal, Self, TypeAlias class _SupportsTimeTuple(Protocol): def timetuple(self) -> time.struct_time: ... @@ -31,7 +31,7 @@ _Marshallable: TypeAlias = ( | Binary ) _XMLDate: TypeAlias = int | datetime | tuple[int, ...] | time.struct_time -_HostType: TypeAlias = Union[tuple[str, dict[str, str]], str] +_HostType: TypeAlias = tuple[str, dict[str, str]] | str def escape(s: str) -> str: ... # undocumented @@ -312,7 +312,7 @@ class ServerProxy: def __call__(self, attr: Literal["transport"]) -> Transport: ... @overload def __call__(self, attr: str) -> Callable[[], None] | Transport: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index 0cb6138dfddd..b969d0cf9e6a 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -1,11 +1,11 @@ import io import sys -from _typeshed import Self, StrOrBytesPath, StrPath, _BufferWithLen +from _typeshed import StrOrBytesPath, StrPath, _BufferWithLen from collections.abc import Callable, Iterable, Iterator from os import PathLike from types import TracebackType from typing import IO, Any, Protocol, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "BadZipFile", @@ -150,7 +150,7 @@ class ZipFile: compresslevel: int | None = None, ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... @@ -214,12 +214,10 @@ class ZipInfo: def __init__(self, filename: str = "NoName", date_time: _DateTuple = ...) -> None: ... if sys.version_info >= (3, 8): @classmethod - def from_file( - cls: type[Self], filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True - ) -> Self: ... + def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... else: @classmethod - def from_file(cls: type[Self], filename: StrPath, arcname: StrPath | None = None) -> Self: ... + def from_file(cls, filename: StrPath, arcname: StrPath | None = None) -> Self: ... def is_dir(self) -> bool: ... def FileHeader(self, zip64: bool | None = None) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi index 0bdf853f4069..fe994be3e8ff 100644 --- a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi @@ -1,7 +1,8 @@ -from _typeshed import Self, StrPath +from _typeshed import StrPath from collections.abc import Iterable, Sequence from datetime import datetime, timedelta, tzinfo from typing import Any, Protocol +from typing_extensions import Self __all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] @@ -14,9 +15,9 @@ class ZoneInfo(tzinfo): def key(self) -> str: ... def __init__(self, key: str) -> None: ... @classmethod - def no_cache(cls: type[Self], key: str) -> Self: ... + def no_cache(cls, key: str) -> Self: ... @classmethod - def from_file(cls: type[Self], __fobj: _IOBytes, key: str | None = ...) -> Self: ... + def from_file(cls, __fobj: _IOBytes, key: str | None = ...) -> Self: ... @classmethod def clear_cache(cls, *, only_keys: Iterable[str] | None = ...) -> None: ... def tzname(self, __dt: datetime | None) -> str | None: ... From 874afd970d1eb270e050a38bcbb8b9b88f7f3f4f Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 26 Sep 2022 12:55:07 -0700 Subject: [PATCH 280/292] Remove use of LiteralString in builtins (#13743) --- mypy/typeshed/stdlib/builtins.pyi | 94 +------------------------------ 1 file changed, 1 insertion(+), 93 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index a8bedc8374bd..ec448f66308b 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -54,7 +54,7 @@ from typing import ( # noqa: Y022 overload, type_check_only, ) -from typing_extensions import Literal, LiteralString, Self, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -413,17 +413,8 @@ class str(Sequence[str]): def __new__(cls, object: object = ...) -> Self: ... @overload def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... - @overload - def capitalize(self: LiteralString) -> LiteralString: ... - @overload def capitalize(self) -> str: ... # type: ignore[misc] - @overload - def casefold(self: LiteralString) -> LiteralString: ... - @overload def casefold(self) -> str: ... # type: ignore[misc] - @overload - def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @@ -431,20 +422,11 @@ class str(Sequence[str]): self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): - @overload - def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: - @overload - def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: int = 8) -> str: ... 
# type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... - @overload def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -460,91 +442,32 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... - @overload - def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... - @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] - @overload - def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def lower(self: LiteralString) -> LiteralString: ... - @overload def lower(self) -> str: ... # type: ignore[misc] - @overload - def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def replace( - self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 - ) -> LiteralString: ... - @overload def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): - @overload - def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... - @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] - @overload - def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... - @overload def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] - @overload - def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def rstrip(self, __chars: str | None = None) -> str: ... 
# type: ignore[misc] - @overload - def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] - @overload - def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... - @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - @overload - def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def swapcase(self: LiteralString) -> LiteralString: ... - @overload def swapcase(self) -> str: ... # type: ignore[misc] - @overload - def title(self: LiteralString) -> LiteralString: ... - @overload def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... - @overload - def upper(self: LiteralString) -> LiteralString: ... - @overload def upper(self) -> str: ... # type: ignore[misc] - @overload - def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... - @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload @@ -555,9 +478,6 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... - @overload - def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... - @overload def __add__(self, __s: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __o: str) -> bool: ... # type: ignore[override] @@ -565,25 +485,13 @@ class str(Sequence[str]): def __ge__(self, __x: str) -> bool: ... def __getitem__(self, __i: SupportsIndex | slice) -> str: ... def __gt__(self, __x: str) -> bool: ... - @overload - def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... - @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __x: str) -> bool: ... - @overload - def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... - @overload def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] - @overload - def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... - @overload def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __x: object) -> bool: ... - @overload - def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... - @overload def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... From 3a240111eb412f879b4c129ebd8781b70b3ab582 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 29 Oct 2022 12:47:21 -0700 Subject: [PATCH 281/292] Revert sum literal integer change (#13961) This is allegedly causing large performance problems, see 13821 typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing to undo. Patching this in typeshed also feels weird, since there's a more general soundness issue. If a typevar has a bound or constraint, we might not want to solve it to a Literal. 
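To make that worry concrete, here is a minimal, hypothetical sketch (not taken from mypy or typeshed) of why answering a bounded type variable with a Literal type would be undesirable:

```python
from typing import List, TypeVar

T = TypeVar("T", bound=int)

def first(xs: List[T]) -> T:
    return xs[0]

x = first([0, 0, 0])
# If T were solved to Literal[0] rather than plain int, this ordinary
# reassignment would be rejected even though every value involved is an int.
x = 5
```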
If we can confirm the performance regression or fix the unsoundness within mypy, I might pursue upstreaming this in typeshed. (Reminder: add this to the sync_typeshed script once merged) --- mypy/typeshed/stdlib/builtins.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index ec448f66308b..7b8e25084c91 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1634,11 +1634,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... From f968d6ce073f03f3cf26417d6134f22aee3ad995 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 18 Nov 2022 01:08:36 -0800 Subject: [PATCH 282/292] Revert typeshed ctypes change (#14128) Since the plugin provides superior type checking: https://github.com/python/mypy/pull/13987#issuecomment-1310863427 --- mypy/typeshed/stdlib/ctypes/__init__.pyi | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index aaaacf287903..497e2f7db70b 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -271,11 +271,7 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - # Note: only available if _CT == c_char - @property - def raw(self) -> bytes: ... - @raw.setter - def raw(self, value: ReadableBuffer) -> None: ... + raw: bytes # Note: only available if _CT == c_char value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. 
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT From ef3187a64d10d1aacbf1d28171b4af00dcd1cb64 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 16 Feb 2023 19:04:25 -0800 Subject: [PATCH 283/292] Update commit hashes in sync typeshed script (#14720) #14719 fixed some merge conflicts --- misc/sync-typeshed.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 5981b6b8fd0c..86b0fd774e0c 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -179,9 +179,9 @@ def main() -> None: print("Created typeshed sync commit.") commits_to_cherry_pick = [ - "820c46a4d75ec5f6dc95c09845a317ff59c4b4bf", # LiteralString reverts - "af7604de58c4c4952fd51a7556a6c56466113010", # sum reverts - "fe40f814387fc671ba0cc679453b01eabeb7c112", # ctypes reverts + "874afd970", # LiteralString reverts + "3a240111e", # sum reverts + "f968d6ce0", # ctypes reverts ] for commit in commits_to_cherry_pick: subprocess.run(["git", "cherry-pick", commit], check=True) From c99133f405f286ed3429c809e9ae2cb3faaa2ceb Mon Sep 17 00:00:00 2001 From: Max Murin Date: Fri, 17 Feb 2023 17:05:35 -0800 Subject: [PATCH 284/292] Fix for bug with `in` operation on optionals in `no-strict-optional` mode (#14727) Fixes a bug introduced in https://github.com/python/mypy/pull/14384 wherein a union that includes `None` is no longer treated as a valid right-hand type for the `in` operator in `no-strict-optional` mode. (The reported error is `error: "None" has no attribute "__iter__" (not iterable) [attr-defined]`) --- mypy/checkexpr.py | 2 +- test-data/unit/check-optional.test | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 754ba6f093f5..38b5c2419d95 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2950,7 +2950,7 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: right_type = get_proper_type(right_type) item_types: Sequence[Type] = [right_type] if isinstance(right_type, UnionType): - item_types = list(right_type.items) + item_types = list(right_type.relevant_items()) sub_result = self.bool_type() diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index db07290f7b40..754c6b52ff19 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -1031,3 +1031,12 @@ def f1(b: bool) -> Optional[int]: class Defer: def __init__(self) -> None: self.defer = 10 + +[case testOptionalIterator] +# mypy: no-strict-optional +from typing import Optional, List + +x: Optional[List[int]] +if 3 in x: + pass + From 8a487ff248783fdc2fc0c1852a15f9fd6fbc12e8 Mon Sep 17 00:00:00 2001 From: Max Murin Date: Fri, 17 Feb 2023 17:57:24 -0800 Subject: [PATCH 285/292] Sync typeshed (#14733) Syncing typeshed again to make sure https://github.com/python/typeshed/pull/9746 gets into the 1.1 release. --- mypy/typeshed/stdlib/urllib/parse.pyi | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index cd1d9347d6f7..50c5d44cdd80 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -192,26 +192,22 @@ def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> A @overload def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ... 
@overload -def urlparse(url: bytes | bytearray, scheme: bytes | bytearray | None, allow_fragments: bool = True) -> ParseResultBytes: ... -@overload def urlparse( - url: None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> ParseResultBytes: ... @overload def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ... if sys.version_info >= (3, 11): @overload - def urlsplit(url: bytes, scheme: bytes | None, allow_fragments: bool = True) -> SplitResultBytes: ... - @overload - def urlsplit(url: None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True) -> SplitResultBytes: ... + def urlsplit( + url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... else: - @overload - def urlsplit(url: bytes | bytearray, scheme: bytes | bytearray | None, allow_fragments: bool = True) -> SplitResultBytes: ... @overload def urlsplit( - url: None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> SplitResultBytes: ... @overload From c03e979ca06c3bf082a4cd07458a1bc3205dc5e5 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 18 Feb 2023 12:39:25 -0500 Subject: [PATCH 286/292] Stubtest: Link directly to line (#14437) This format allows editors and terminals that support linking to specific lines in files to go directly to the right line. --- docs/source/stubtest.rst | 4 ++-- mypy/stubtest.py | 8 ++++---- mypy/test/data.py | 4 +--- mypy/test/teststubtest.py | 6 +++--- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/docs/source/stubtest.rst b/docs/source/stubtest.rst index a8279eb6c239..f3c036f56c06 100644 --- a/docs/source/stubtest.rst +++ b/docs/source/stubtest.rst @@ -42,7 +42,7 @@ test Python's official collection of library stubs, `typeshed `_. .. warning:: - + stubtest will import and execute Python code from the packages it checks. 
Example @@ -69,7 +69,7 @@ Here's a quick example of what stubtest can do: error: library.foo is inconsistent, runtime argument "x" has a default value but stub argument does not Stub: at line 3 def (x: builtins.int) - Runtime: at line 3 in file ~/library.py + Runtime: in file ~/library.py:3 def (x=None) error: library.x variable differs from runtime type Literal['hello, stubtest'] diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 4a99c407f319..cd173f63e2a1 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -129,10 +129,10 @@ def get_description(self, concise: bool = False) -> str: stub_file = stub_node.path or None stub_loc_str = "" - if stub_line: - stub_loc_str += f" at line {stub_line}" if stub_file: stub_loc_str += f" in file {Path(stub_file)}" + if stub_line: + stub_loc_str += f"{':' if stub_file else ' at line '}{stub_line}" runtime_line = None runtime_file = None @@ -147,10 +147,10 @@ def get_description(self, concise: bool = False) -> str: pass runtime_loc_str = "" - if runtime_line: - runtime_loc_str += f" at line {runtime_line}" if runtime_file: runtime_loc_str += f" in file {Path(runtime_file)}" + if runtime_line: + runtime_loc_str += f"{':' if runtime_file else ' at line '}{runtime_line}" output = [ _style("error: ", color="red", bold=True), diff --git a/mypy/test/data.py b/mypy/test/data.py index 535ebf304784..6e2ad198f614 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -169,9 +169,7 @@ def parse_test_case(case: DataDrivenTestCase) -> None: elif item.id == "triggered" and item.arg is None: triggered = item.data else: - raise ValueError( - f"Invalid section header {item.id} in {case.file} at line {item.line}" - ) + raise ValueError(f"Invalid section header {item.id} in {case.file}:{item.line}") if out_section_missing: raise ValueError(f"{case.file}, line {first_item.line}: Required output section not found") diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 42dd40d76414..6bb4dfb2c937 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -1561,9 +1561,9 @@ def test_output(self) -> None: expected = ( f'error: {TEST_MODULE_NAME}.bad is inconsistent, stub argument "number" differs ' 'from runtime argument "num"\n' - f"Stub: at line 1 in file {TEST_MODULE_NAME}.pyi\n" + f"Stub: in file {TEST_MODULE_NAME}.pyi:1\n" "def (number: builtins.int, text: builtins.str)\n" - f"Runtime: at line 1 in file {TEST_MODULE_NAME}.py\ndef (num, text)\n\n" + f"Runtime: in file {TEST_MODULE_NAME}.py:1\ndef (num, text)\n\n" "Found 1 error (checked 1 module)\n" ) assert remove_color_code(output) == expected @@ -1721,7 +1721,7 @@ def test_config_file(self) -> None: output = run_stubtest(stub=stub, runtime=runtime, options=[]) assert remove_color_code(output) == ( f"error: {TEST_MODULE_NAME}.temp variable differs from runtime type Literal[5]\n" - f"Stub: at line 2 in file {TEST_MODULE_NAME}.pyi\n_decimal.Decimal\nRuntime:\n5\n\n" + f"Stub: in file {TEST_MODULE_NAME}.pyi:2\n_decimal.Decimal\nRuntime:\n5\n\n" "Found 1 error (checked 1 module)\n" ) output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) From f2cac4a1bf08874f3862cdb48cad7f908577c400 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 23 Feb 2023 22:50:45 +0100 Subject: [PATCH 287/292] [1.1 backport] [dataclass_transform] detect transform spec changes in incremental mode (#14695) (#14768) Adds support for triggering rechecking of downstream classes when `@dataclass_transform` is added or removed from a 
function/class, as well as when parameters to `dataclass_transform` are updated. These changes aren't propagated normally since they don't change the type signature of the `dataclass_transform` decorator. Also adds new a new `find-grained-dataclass-transform.test` file to test the new logic. (cherry picked from commit 29bcc7ffe1118d01c374a12957b09e4d42c1c69e) Co-authored-by: Wesley Collin Wright --- mypy/nodes.py | 4 +- mypy/server/astdiff.py | 8 ++ .../fine-grained-dataclass-transform.test | 92 +++++++++++++++++++ 3 files changed, 102 insertions(+), 2 deletions(-) create mode 100644 test-data/unit/fine-grained-dataclass-transform.test diff --git a/mypy/nodes.py b/mypy/nodes.py index 4787930214f3..9247d391bc96 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -3907,7 +3907,7 @@ def serialize(self) -> JsonDict: "order_default": self.order_default, "kw_only_default": self.kw_only_default, "frozen_only_default": self.frozen_default, - "field_specifiers": self.field_specifiers, + "field_specifiers": list(self.field_specifiers), } @classmethod @@ -3917,7 +3917,7 @@ def deserialize(cls, data: JsonDict) -> DataclassTransformSpec: order_default=data.get("order_default"), kw_only_default=data.get("kw_only_default"), frozen_default=data.get("frozen_default"), - field_specifiers=data.get("field_specifiers"), + field_specifiers=tuple(data.get("field_specifiers", [])), ) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 40b60f1a69d8..c942a5eb3b0f 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -73,6 +73,7 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' TypeVarTupleExpr, Var, ) +from mypy.semanal_shared import find_dataclass_transform_spec from mypy.types import ( AnyType, CallableType, @@ -230,6 +231,7 @@ def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> Symb elif isinstance(node, OverloadedFuncDef) and node.impl: impl = node.impl.func if isinstance(node.impl, Decorator) else node.impl is_trivial_body = impl.is_trivial_body if impl else False + dataclass_transform_spec = find_dataclass_transform_spec(node) return ( "Func", common, @@ -239,6 +241,7 @@ def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> Symb node.is_static, signature, is_trivial_body, + dataclass_transform_spec.serialize() if dataclass_transform_spec is not None else None, ) elif isinstance(node, Var): return ("Var", common, snapshot_optional_type(node.type), node.is_final) @@ -256,6 +259,10 @@ def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> Symb snapshot_definition(node.func, common), ) elif isinstance(node, TypeInfo): + dataclass_transform_spec = node.dataclass_transform_spec + if dataclass_transform_spec is None: + dataclass_transform_spec = find_dataclass_transform_spec(node) + attrs = ( node.is_abstract, node.is_enum, @@ -280,6 +287,7 @@ def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> Symb tuple(snapshot_type(tdef) for tdef in node.defn.type_vars), [snapshot_type(base) for base in node.bases], [snapshot_type(p) for p in node._promote], + dataclass_transform_spec.serialize() if dataclass_transform_spec is not None else None, ) prefix = node.fullname symbol_table = snapshot_symbol_table(prefix, node.names) diff --git a/test-data/unit/fine-grained-dataclass-transform.test b/test-data/unit/fine-grained-dataclass-transform.test new file mode 100644 index 000000000000..7dc852f1d733 --- /dev/null +++ b/test-data/unit/fine-grained-dataclass-transform.test @@ -0,0 +1,92 
@@ +[case updateDataclassTransformParameterViaDecorator] +# flags: --python-version 3.11 +from m import my_dataclass + +@my_dataclass +class Foo: + x: int + +foo = Foo(1) +foo.x = 2 + +[file m.py] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=False) +def my_dataclass(cls): return cls + +[file m.py.2] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +def my_dataclass(cls): return cls + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[out] +== +main:9: error: Property "x" defined in "Foo" is read-only + +[case updateDataclassTransformParameterViaParentClass] +# flags: --python-version 3.11 +from m import Dataclass + +class Foo(Dataclass): + x: int + +foo = Foo(1) +foo.x = 2 + +[file m.py] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=False) +class Dataclass: ... + +[file m.py.2] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class Dataclass: ... + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[out] +== +main:8: error: Property "x" defined in "Foo" is read-only + +[case updateBaseClassToUseDataclassTransform] +# flags: --python-version 3.11 +from m import A + +class B(A): + y: int + +B(x=1, y=2) + +[file m.py] +class Dataclass: ... + +class A(Dataclass): + x: int + +[file m.py.2] +from typing import dataclass_transform + +@dataclass_transform() +class Dataclass: ... + +class A(Dataclass): + x: int + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[out] +main:7: error: Unexpected keyword argument "x" for "B" +builtins.pyi:12: note: "B" defined here +main:7: error: Unexpected keyword argument "y" for "B" +builtins.pyi:12: note: "B" defined here +== From 17fba49939d4d8408f77e539290b24dd9b7f07ae Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 23 Feb 2023 22:50:58 +0100 Subject: [PATCH 288/292] [1.1 backport] [dataclass_transform] include __dataclass_fields__ in transformed types (#14752) (#14769) `dataclasses` uses a `__dataclass_fields__` attribute on each class to mark that it is a dataclass, and Typeshed checks for this attribute in its stubs for functions like `dataclasses.is_dataclass` and `dataclasses.asdict`. In #14667, I mistakenly removed this attribute for classes transformed by a `dataclass_transform`. This was due to a misinterpretation of PEP 681 on my part; after rereading the [section on dataclass semantics](https://peps.python.org/pep-0681/#dataclass-semantics), it says: > Except where stated otherwise in this PEP, classes impacted by `dataclass_transform`, either by inheriting from a class that is decorated with `dataclass_transform` or by being decorated with a function decorated with `dataclass_transform`, are assumed to behave like stdlib dataclass. The PEP doesn't seem to state anything about `__dataclass_fields__` or the related functions as far as I can tell, so we should assume that transforms should match the behavior of `dataclasses.dataclass` in this regard and include the attribute. This also matches the behavior of Pyright, which the PEP defines as the reference implementation. 
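For reference, a small stdlib-only illustration (not part of this patch) of the runtime contract that `__dataclass_fields__` provides:

```python
import dataclasses

@dataclasses.dataclass
class Point:
    x: int
    y: int

# Stdlib dataclasses record per-field metadata in this mapping, and runtime
# helpers such as is_dataclass() and asdict() key off its presence, so classes
# built through a dataclass_transform need the attribute as well.
print(list(Point.__dataclass_fields__))  # ['x', 'y']
print(dataclasses.is_dataclass(Point))   # True
print(dataclasses.asdict(Point(1, 2)))   # {'x': 1, 'y': 2}
```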
(cherry picked from commit 54635dec2379e2ac8b65b6ef07778015c69cfb6a) Co-authored-by: Wesley Collin Wright --- mypy/plugins/dataclasses.py | 19 ++++++++++--------- test-data/unit/check-dataclass-transform.test | 3 +-- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 6b1062d6457f..7694134ac09e 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -648,17 +648,18 @@ def _is_kw_only_type(self, node: Type | None) -> bool: return node_type.type.fullname == "dataclasses.KW_ONLY" def _add_dataclass_fields_magic_attribute(self) -> None: - # Only add if the class is a dataclasses dataclass, and omit it for dataclass_transform - # classes. - # It would be nice if this condition were reified rather than using an `is` check. - # Only add if the class is a dataclasses dataclass, and omit it for dataclass_transform - # classes. - if self._spec is not _TRANSFORM_SPEC_FOR_DATACLASSES: - return - attr_name = "__dataclass_fields__" any_type = AnyType(TypeOfAny.explicit) - field_type = self._api.named_type_or_none("dataclasses.Field", [any_type]) or any_type + # For `dataclasses`, use the type `dict[str, Field[Any]]` for accuracy. For dataclass + # transforms, it's inaccurate to use `Field` since a given transform may use a completely + # different type (or none); fall back to `Any` there. + # + # In either case, we're aiming to match the Typeshed stub for `is_dataclass`, which expects + # the instance to have a `__dataclass_fields__` attribute of type `dict[str, Field[Any]]`. + if self._spec is _TRANSFORM_SPEC_FOR_DATACLASSES: + field_type = self._api.named_type_or_none("dataclasses.Field", [any_type]) or any_type + else: + field_type = any_type attr_type = self._api.named_type( "builtins.dict", [self._api.named_type("builtins.str"), field_type] ) diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 2a7fad1da992..ec87bd4757ed 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -279,8 +279,7 @@ class Bad: bad1: int = field(alias=some_str()) # E: "alias" argument to dataclass field must be a string literal bad2: int = field(kw_only=some_bool()) # E: "kw_only" argument must be a boolean literal -# this metadata should only exist for dataclasses.dataclass classes -Foo.__dataclass_fields__ # E: "Type[Foo]" has no attribute "__dataclass_fields__" +reveal_type(Foo.__dataclass_fields__) # N: Revealed type is "builtins.dict[builtins.str, Any]" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] From c2016586d45767246d73bc38fd5b01e0d5c8f787 Mon Sep 17 00:00:00 2001 From: Max Murin Date: Thu, 23 Feb 2023 20:10:20 -0800 Subject: [PATCH 289/292] Remove +dev from version number before release --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index 258a0e4f8bcb..c3eb4666972d 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". 
-__version__ = "1.1.0+dev" +__version__ = "1.1.0" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From a27dec535e3eb1ed8dab1625e592bce5ab9a7972 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Mon, 27 Feb 2023 18:22:58 +0000 Subject: [PATCH 290/292] Fix `--strict-equality` crash for instances of a class generic over a `ParamSpec` (#14792) Fixes #14783. Running mypy on this snippet of code currently causes a crash if you have the `--strict-equality` option enabled: ```python from typing import Generic, ParamSpec P = ParamSpec("P") class Foo(Generic[P]): ... def checker(foo1: Foo[[int]], foo2: Foo[[str]]) -> None: foo1 == foo2 ``` This is because the overlapping-equality logic in `meet.py` currently does not account for the fact that `left` and `right` might both be instances of `mypy.types.Parameters`, leading to this assertion being tripped: https://github.com/python/mypy/blob/800e8ffdf17de9fc641fefff46389a940f147eef/mypy/meet.py#L519 This PR attempts to add the necessary logic to `meet.py` to handle instances of `mypy.types.Parameters`. --- mypy/meet.py | 17 +++++++++++++- test-data/unit/pythoneval.test | 43 ++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 1 deletion(-) diff --git a/mypy/meet.py b/mypy/meet.py index d99e1a92d2eb..3214b4b43975 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -342,7 +342,22 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: left_possible = get_possible_variants(left) right_possible = get_possible_variants(right) - # We start by checking multi-variant types like Unions first. We also perform + # First handle special cases relating to PEP 612: + # - comparing a `Parameters` to a `Parameters` + # - comparing a `Parameters` to a `ParamSpecType` + # - comparing a `ParamSpecType` to a `ParamSpecType` + # + # These should all always be considered overlapping equality checks. + # These need to be done before we move on to other TypeVarLike comparisons. + if isinstance(left, (Parameters, ParamSpecType)) and isinstance( + right, (Parameters, ParamSpecType) + ): + return True + # A `Parameters` does not overlap with anything else, however + if isinstance(left, Parameters) or isinstance(right, Parameters): + return False + + # Now move on to checking multi-variant types like Unions. We also perform # the same logic if either type happens to be a TypeVar/ParamSpec/TypeVarTuple. # # Handling the TypeVarLikes now lets us simulate having them bind to the corresponding diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index fbbaecbba241..a3413e071184 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1924,3 +1924,46 @@ _testStarUnpackNestedUnderscore.py:10: error: List item 0 has incompatible type _testStarUnpackNestedUnderscore.py:10: error: List item 1 has incompatible type "int"; expected "str" _testStarUnpackNestedUnderscore.py:11: note: Revealed type is "builtins.list[builtins.str]" _testStarUnpackNestedUnderscore.py:16: note: Revealed type is "builtins.list[builtins.object]" + +[case testStrictEqualitywithParamSpec] +# flags: --strict-equality +from typing import Generic +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") + +class Foo(Generic[P]): ... +class Bar(Generic[P]): ... 
+ +def bad(foo: Foo[[int]], bar: Bar[[int]]) -> bool: + return foo == bar + +def good1(foo1: Foo[[int]], foo2: Foo[[str]]) -> bool: + return foo1 == foo2 + +def good2(foo1: Foo[[int, str]], foo2: Foo[[int, bytes]]) -> bool: + return foo1 == foo2 + +def good3(foo1: Foo[[int]], foo2: Foo[[int, int]]) -> bool: + return foo1 == foo2 + +def good4(foo1: Foo[[int]], foo2: Foo[[int]]) -> bool: + return foo1 == foo2 + +def good5(foo1: Foo[[int]], foo2: Foo[[bool]]) -> bool: + return foo1 == foo2 + +def good6(foo1: Foo[[int, int]], foo2: Foo[[bool, bool]]) -> bool: + return foo1 == foo2 + +def good7(foo1: Foo[[int]], foo2: Foo[P], *args: P.args, **kwargs: P.kwargs) -> bool: + return foo1 == foo2 + +def good8(foo1: Foo[P], foo2: Foo[[int, str, bytes]], *args: P.args, **kwargs: P.kwargs) -> bool: + return foo1 == foo2 + +def good9(foo1: Foo[Concatenate[int, P]], foo2: Foo[[int, str, bytes]], *args: P.args, **kwargs: P.kwargs) -> bool: + return foo1 == foo2 + +[out] +_testStrictEqualitywithParamSpec.py:11: error: Non-overlapping equality check (left operand type: "Foo[[int]]", right operand type: "Bar[[int]]") From 6d355f57df1a664e9853891ca77af68944242d52 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 2 Mar 2023 15:15:22 +0000 Subject: [PATCH 291/292] [Release 1.1] Cherry-pick some mypyc build fixes (#14820) The mypyc build was recently broken by a new release of `types-setuptools`. This was fixed on `master` by the following two PRs: - #14781 - #14787 However, the mypyc build is still broken on the 1.1 branch: https://github.com/python/mypy/actions/runs/4311688115/jobs/7521345529. This PR cherry-picks the two PRs that fixed the build to the 1.1 branch. --------- Co-authored-by: Avasam --- mypyc/build.py | 21 +++++++++++++++------ setup.py | 12 +++++++++++- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/mypyc/build.py b/mypyc/build.py index cc03eba95b4e..8e1ee8078c11 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -25,7 +25,7 @@ import re import sys import time -from typing import TYPE_CHECKING, Any, Dict, Iterable, NoReturn, cast +from typing import TYPE_CHECKING, Any, Dict, Iterable, NoReturn, Union, cast from mypy.build import BuildSource from mypy.errors import CompileError @@ -41,11 +41,17 @@ from mypyc.options import CompilerOptions if TYPE_CHECKING: - from distutils.core import Extension + from distutils.core import Extension as _distutils_Extension + from typing_extensions import TypeAlias + + from setuptools import Extension as _setuptools_Extension + + Extension: TypeAlias = Union[_setuptools_Extension, _distutils_Extension] + try: # Import setuptools so that it monkey-patch overrides distutils - import setuptools # noqa: F401 + import setuptools except ImportError: if sys.version_info >= (3, 12): # Raise on Python 3.12, since distutils will go away forever @@ -57,13 +63,16 @@ def get_extension() -> type[Extension]: # We can work with either setuptools or distutils, and pick setuptools # if it has been imported. 
use_setuptools = "setuptools" in sys.modules + extension_class: type[Extension] if not use_setuptools: - from distutils.core import Extension + import distutils.core + + extension_class = distutils.core.Extension else: - from setuptools import Extension + extension_class = setuptools.Extension - return Extension + return extension_class def setup_mypycify_vars() -> None: diff --git a/setup.py b/setup.py index 516a639f3bb2..5d5ea06fb714 100644 --- a/setup.py +++ b/setup.py @@ -6,6 +6,7 @@ import os import os.path import sys +from typing import TYPE_CHECKING, Any if sys.version_info < (3, 7, 0): sys.stderr.write("ERROR: You need Python 3.7 or later to use mypy.\n") @@ -17,11 +18,14 @@ # This requires setuptools when building; setuptools is not needed # when installing from a wheel file (though it is still needed for # alternative forms of installing, as suggested by README.md). -from setuptools import find_packages, setup +from setuptools import Extension, find_packages, setup from setuptools.command.build_py import build_py from mypy.version import __version__ as version +if TYPE_CHECKING: + from typing_extensions import TypeGuard + description = "Optional static typing for Python" long_description = """ Mypy -- Optional Static Typing for Python @@ -36,6 +40,10 @@ """.lstrip() +def is_list_of_setuptools_extension(items: list[Any]) -> TypeGuard[list[Extension]]: + return all(isinstance(item, Extension) for item in items) + + def find_package_data(base, globs, root="mypy"): """Find all interesting data files, for setup(package_data=) @@ -166,6 +174,8 @@ def run(self): # our Appveyor builds run out of memory sometimes. multi_file=sys.platform == "win32" or force_multifile, ) + assert is_list_of_setuptools_extension(ext_modules), "Expected mypycify to use setuptools" + else: ext_modules = [] From 9b777a36315b1ba24ab840f9f905cfb6c82e35a9 Mon Sep 17 00:00:00 2001 From: Max Murin Date: Mon, 6 Mar 2023 10:06:23 -0800 Subject: [PATCH 292/292] bump version to 1.1.1 for wheels build --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index c3eb4666972d..4d32a1d18dc8 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". -__version__ = "1.1.0" +__version__ = "1.1.1" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
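For readers skimming the series, the core of the mypyc build fix in PATCH 291 reduces to the selection logic sketched below; this is a simplified, illustrative version, and the helper name here is invented (the real function is `get_extension` in `mypyc/build.py`):

```python
import sys

def pick_extension_class():
    # Prefer setuptools.Extension when setuptools has already been imported,
    # since importing setuptools monkey-patches distutils; otherwise fall back
    # to the plain distutils class (distutils is removed on Python 3.12+).
    if "setuptools" in sys.modules:
        from setuptools import Extension
    else:
        from distutils.core import Extension
    return Extension
```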