From 4aab10c36c121424ce1f11738d8ccfdf3c687e72 Mon Sep 17 00:00:00 2001
From: hauntsaninja
Date: Mon, 1 Jan 2024 15:33:42 -0800
Subject: [PATCH] tmp

---
 .pre-commit-config.yaml      |  2 +-
 mypy/binder.py               |  2 +-
 mypy/build.py                | 10 ++++------
 mypy/checker.py              |  7 +++----
 mypy/checkexpr.py            |  6 ++----
 mypy/checkpattern.py         |  2 +-
 mypy/checkstrformat.py       |  4 ++--
 mypy/dmypy_server.py         |  2 +-
 mypy/dmypy_util.py           |  2 +-
 mypy/errors.py               |  2 +-
 mypy/main.py                 |  6 +++---
 mypy/message_registry.py     |  2 +-
 mypy/messages.py             |  2 +-
 mypy/nodes.py                |  6 +++---
 mypy/patterns.py             | 14 ++++++++------
 mypy/plugins/enums.py        |  4 ++--
 mypy/renaming.py             |  2 +-
 mypy/semanal.py              |  8 ++++----
 mypy/semanal_enum.py         |  2 +-
 mypy/semanal_namedtuple.py   |  2 +-
 mypy/semanal_typeddict.py    |  2 +-
 mypy/server/astmerge.py      |  4 ++--
 mypy/stubdoc.py              |  2 +-
 mypy/stubgenc.py             |  8 ++++----
 mypy/stubtest.py             | 16 ++++++----------
 mypy/stubutil.py             |  2 +-
 mypy/test/helpers.py         |  4 ++--
 mypy/test/testcheck.py       |  5 ++++-
 mypy/test/testformatter.py   |  4 ++--
 mypy/test/teststubtest.py    |  4 ++--
 mypy/typeanal.py             |  2 +-
 mypy/types.py                |  2 +-
 mypy/typestate.py            |  4 ++--
 mypy/util.py                 |  2 +-
 mypyc/codegen/emitmodule.py  |  2 +-
 mypyc/codegen/emitwrapper.py |  4 ++--
 mypyc/irbuild/prepare.py     |  6 +++---
 mypyc/primitives/registry.py | 20 +++++++++++++++-----
 pyproject.toml               |  6 ++++--
 test-requirements.in         |  2 +-
 test-requirements.txt        |  2 +-
 41 files changed, 99 insertions(+), 91 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bd2a09b7a8cfe..4090bf0ecb4c6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
     hooks:
       - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.0  # must match test-requirements.txt
+    rev: v0.1.4  # must match test-requirements.txt
     hooks:
       - id: ruff
         args: [--exit-non-zero-on-fix]
diff --git a/mypy/binder.py b/mypy/binder.py
index 3b67d09f16c3b..9d0a33b54bc29 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -291,7 +291,7 @@ def assign_type(
             self.type_assignments[expr].append((type, declared_type))
             return
         if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)):
-            return None
+            return
         if not literal(expr):
             return
         self.invalidate_dependencies(expr)
diff --git a/mypy/build.py b/mypy/build.py
index b3ca8d06916d0..8049fa2d0c3f4 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -682,9 +682,7 @@ def __init__(
         # for efficient lookup
         self.shadow_map: dict[str, str] = {}
         if self.options.shadow_file is not None:
-            self.shadow_map = {
-                source_file: shadow_file for (source_file, shadow_file) in self.options.shadow_file
-            }
+            self.shadow_map = dict(self.options.shadow_file)
         # a mapping from each file being typechecked to its possible shadow file
         self.shadow_equivalence_map: dict[str, str | None] = {}
         self.plugin = plugin
@@ -1120,7 +1118,7 @@ def read_deps_cache(manager: BuildManager, graph: Graph) -> dict[str, FgDepMeta]
     module_deps_metas = deps_meta["deps_meta"]
     assert isinstance(module_deps_metas, dict)
     if not manager.options.skip_cache_mtime_checks:
-        for id, meta in module_deps_metas.items():
+        for meta in module_deps_metas.values():
             try:
                 matched = manager.getmtime(meta["path"]) == meta["mtime"]
             except FileNotFoundError:
@@ -2093,7 +2091,7 @@ def load_tree(self, temporary: bool = False) -> None:
             self.meta.data_json, self.manager, "Load tree ", "Could not load tree: "
         )
         if data is None:
-            return None
+            return
 
         t0 = time.time()
         # TODO: Assert data file wasn't changed.
@@ -3383,7 +3381,7 @@ def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) ->
     strongly_connected_components() below for a reference.
     """
     if len(ascc) == 1:
-        return [s for s in ascc]
+        return list(ascc)
     pri_spread = set()
     for id in ascc:
         state = graph[id]
diff --git a/mypy/checker.py b/mypy/checker.py
index 0ac8f6904973f..1b57ef7801045 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -632,7 +632,7 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
         if not defn.items:
            # In this case we have already complained about none of these being
            # valid overloads.
-            return None
+            return
 
         if len(defn.items) == 1:
             self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, defn)
@@ -676,7 +676,6 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
                 self.msg.no_overridable_method(defn.name, defn)
             self.check_explicit_override_decorator(defn, found_method_base_classes, defn.impl)
             self.check_inplace_operator_method(defn)
-        return None
 
     def extract_callable_type(self, inner_type: Type | None, ctx: Context) -> CallableType | None:
         """Get type as seen by an overload item caller."""
@@ -1838,7 +1837,7 @@ def check_match_args(self, var: Var, typ: Type, context: Context) -> None:
             return
         typ = get_proper_type(typ)
         if not isinstance(typ, TupleType) or not all(
-            [is_string_literal(item) for item in typ.items]
+            is_string_literal(item) for item in typ.items
         ):
             self.msg.note(
                 "__match_args__ must be a tuple containing string literals for checking "
@@ -5045,7 +5044,7 @@ def visit_break_stmt(self, s: BreakStmt) -> None:
 
     def visit_continue_stmt(self, s: ContinueStmt) -> None:
         self.binder.handle_continue()
-        return None
+        return
 
     def visit_match_stmt(self, s: MatchStmt) -> None:
         with self.binder.frame_context(can_skip=False, fall_through=0):
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 626584bc3a201..3af8d70e78c99 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -739,7 +739,7 @@ def check_typeddict_call(
         context: Context,
         orig_callee: Type | None,
     ) -> Type:
-        if args and all([ak in (ARG_NAMED, ARG_STAR2) for ak in arg_kinds]):
+        if args and all(ak in (ARG_NAMED, ARG_STAR2) for ak in arg_kinds):
             # ex: Point(x=42, y=1337, **extras)
             # This is a bit ugly, but this is a price for supporting all possible syntax
             # variants for TypedDict constructors.
@@ -4017,9 +4017,7 @@ def check_op(
             left_variants = [base_type]
             base_type = get_proper_type(base_type)
             if isinstance(base_type, UnionType):
-                left_variants = [
-                    item for item in flatten_nested_unions(base_type.relevant_items())
-                ]
+                left_variants = list(flatten_nested_unions(base_type.relevant_items()))
             right_type = self.accept(arg)
 
             # Step 1: We first try leaving the right arguments alone and destructure
diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py
index c0061f1c3e723..3210dcc3b7aca 100644
--- a/mypy/checkpattern.py
+++ b/mypy/checkpattern.py
@@ -187,7 +187,7 @@ def visit_or_pattern(self, o: OrPattern) -> PatternType:
                 capture_types[node].append((expr, typ))
 
         captures: dict[Expression, Type] = {}
-        for var, capture_list in capture_types.items():
+        for capture_list in capture_types.values():
             typ = UninhabitedType()
             for _, other in capture_list:
                 typ = join_types(typ, other)
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index 39d44e84a9c1d..c63210a96c443 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -372,7 +372,7 @@ def check_specs_in_format_call(
             ):
                 # TODO: add support for some custom specs like datetime?
                 self.msg.fail(
-                    "Unrecognized format" ' specification "{}"'.format(spec.format_spec[1:]),
+                    f'Unrecognized format specification "{spec.format_spec[1:]}"',
                     call,
                     code=codes.STRING_FORMATTING,
                 )
@@ -482,7 +482,7 @@ def find_replacements_in_call(self, call: CallExpr, keys: list[str]) -> list[Exp
                 expr = self.get_expr_by_name(key, call)
                 if not expr:
                     self.msg.fail(
-                        "Cannot find replacement for named" ' format specifier "{}"'.format(key),
+                        f'Cannot find replacement for named format specifier "{key}"',
                         call,
                         code=codes.STRING_FORMATTING,
                     )
diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py
index 42236497f275c..b4c3fe8fe0dcb 100644
--- a/mypy/dmypy_server.py
+++ b/mypy/dmypy_server.py
@@ -1055,7 +1055,7 @@ def fix_module_deps(graph: mypy.build.Graph) -> None:
     This can make some suppressed dependencies non-suppressed, and vice versa
     (if modules have been added to or removed from the build).
     """
-    for module, state in graph.items():
+    for state in graph.values():
         new_suppressed = []
         new_dependencies = []
         for dep in state.dependencies + state.suppressed:
diff --git a/mypy/dmypy_util.py b/mypy/dmypy_util.py
index d95cba9f40b5d..fe949e8fc294d 100644
--- a/mypy/dmypy_util.py
+++ b/mypy/dmypy_util.py
@@ -43,7 +43,7 @@ def send(connection: IPCBase, data: Any) -> None:
 class WriteToConn:
     """Helper class to write to a connection instead of standard output."""
 
-    def __init__(self, server: IPCBase, output_key: str = "stdout"):
+    def __init__(self, server: IPCBase, output_key: str = "stdout") -> None:
         self.server = server
         self.output_key = output_key
 
diff --git a/mypy/errors.py b/mypy/errors.py
index 6e90c28d9c036..eabe96a2dc737 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -170,7 +170,7 @@ def __init__(
         *,
         filter_errors: bool | Callable[[str, ErrorInfo], bool] = False,
         save_filtered_errors: bool = False,
-    ):
+    ) -> None:
         self.errors = errors
         self._has_new_errors = False
         self._filter = filter_errors
diff --git a/mypy/main.py b/mypy/main.py
index 2c68466ec9771..b32624456ce05 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -145,7 +145,7 @@ def main(
         sys.exit(code)
 
     # HACK: keep res alive so that mypyc won't free it before the hard_exit
-    list([res])
+    list([res])  # noqa: C410
 
 
 def run_build(
@@ -349,7 +349,7 @@ class CapturableArgumentParser(argparse.ArgumentParser):
     yet output must be captured to properly support mypy.api.run.
""" - def __init__(self, *args: Any, **kwargs: Any): + def __init__(self, *args: Any, **kwargs: Any) -> None: self.stdout = kwargs.pop("stdout", sys.stdout) self.stderr = kwargs.pop("stderr", sys.stderr) super().__init__(*args, **kwargs) @@ -415,7 +415,7 @@ def __init__( default: str = argparse.SUPPRESS, help: str = "show program's version number and exit", stdout: IO[str] | None = None, - ): + ) -> None: super().__init__( option_strings=option_strings, dest=dest, default=default, nargs=0, help=help ) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 8dc14e158d90b..fb430b63c74b3 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -52,7 +52,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: '"return" with value in async generator is not allowed' ) INVALID_RETURN_TYPE_FOR_GENERATOR: Final = ErrorMessage( - 'The return type of a generator function should be "Generator"' " or one of its supertypes" + 'The return type of a generator function should be "Generator" or one of its supertypes' ) INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR: Final = ErrorMessage( 'The return type of an async generator function should be "AsyncGenerator" or one of its ' diff --git a/mypy/messages.py b/mypy/messages.py index 069c4d51e281d..450faf4c16886 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -3099,7 +3099,7 @@ def append_invariance_notes( ): invariant_type = "Dict" covariant_suggestion = ( - 'Consider using "Mapping" instead, ' "which is covariant in the value type" + 'Consider using "Mapping" instead, which is covariant in the value type' ) if invariant_type and covariant_suggestion: notes.append( diff --git a/mypy/nodes.py b/mypy/nodes.py index 17e06613d1e33..fe41777c55f7f 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -3137,7 +3137,7 @@ def protocol_members(self) -> list[str]: if name in EXCLUDED_PROTOCOL_ATTRIBUTES: continue members.add(name) - return sorted(list(members)) + return sorted(members) def __getitem__(self, name: str) -> SymbolTableNode: n = self.get(name) @@ -3296,7 +3296,7 @@ def serialize(self) -> JsonDict: else self.typeddict_type.serialize(), "flags": get_flags(self, TypeInfo.FLAGS), "metadata": self.metadata, - "slots": list(sorted(self.slots)) if self.slots is not None else None, + "slots": sorted(self.slots) if self.slots is not None else None, "deletable_attributes": self.deletable_attributes, "self_type": self.self_type.serialize() if self.self_type is not None else None, "dataclass_transform_spec": ( @@ -3966,7 +3966,7 @@ def __init__( # frozen_default was added to CPythonin https://github.com/python/cpython/pull/99958 citing # positive discussion in typing-sig frozen_default: bool | None = None, - ): + ) -> None: self.eq_default = eq_default if eq_default is not None else True self.order_default = order_default if order_default is not None else False self.kw_only_default = kw_only_default if kw_only_default is not None else False diff --git a/mypy/patterns.py b/mypy/patterns.py index 839864ef58794..a01bf6acc8766 100644 --- a/mypy/patterns.py +++ b/mypy/patterns.py @@ -60,7 +60,7 @@ class ValuePattern(Pattern): expr: Expression - def __init__(self, expr: Expression): + def __init__(self, expr: Expression) -> None: super().__init__() self.expr = expr @@ -72,7 +72,7 @@ class SingletonPattern(Pattern): # This can be exactly True, False or None value: bool | None - def __init__(self, value: bool | None): + def __init__(self, value: bool | None) -> None: super().__init__() self.value = value @@ -85,7 +85,7 @@ class 
 
     patterns: list[Pattern]
 
-    def __init__(self, patterns: list[Pattern]):
+    def __init__(self, patterns: list[Pattern]) -> None:
         super().__init__()
         self.patterns = patterns
 
@@ -98,7 +98,7 @@ class StarredPattern(Pattern):
     # a name.
     capture: NameExpr | None
 
-    def __init__(self, capture: NameExpr | None):
+    def __init__(self, capture: NameExpr | None) -> None:
         super().__init__()
         self.capture = capture
 
@@ -111,7 +111,9 @@ class MappingPattern(Pattern):
     values: list[Pattern]
     rest: NameExpr | None
 
-    def __init__(self, keys: list[Expression], values: list[Pattern], rest: NameExpr | None):
+    def __init__(
+        self, keys: list[Expression], values: list[Pattern], rest: NameExpr | None
+    ) -> None:
         super().__init__()
         assert len(keys) == len(values)
         self.keys = keys
@@ -136,7 +138,7 @@ def __init__(
         positionals: list[Pattern],
         keyword_keys: list[str],
         keyword_values: list[Pattern],
-    ):
+    ) -> None:
         super().__init__()
         assert len(keyword_keys) == len(keyword_values)
         self.class_ref = class_ref
diff --git a/mypy/plugins/enums.py b/mypy/plugins/enums.py
index 7869a8b5cdfab..2c568f66c62dc 100644
--- a/mypy/plugins/enums.py
+++ b/mypy/plugins/enums.py
@@ -166,11 +166,11 @@ class SomeEnum:
         for n in stnodes
         if n is None or not n.implicit
     )
-    proper_types = list(
+    proper_types = [
         _infer_value_type_with_auto_fallback(ctx, t)
         for t in node_types
         if t is None or not isinstance(t, CallableType)
-    )
+    ]
     underlying_type = _first(proper_types)
     if underlying_type is None:
         return ctx.default_attr_type
diff --git a/mypy/renaming.py b/mypy/renaming.py
index c960eb4b1ce82..8db3362059609 100644
--- a/mypy/renaming.py
+++ b/mypy/renaming.py
@@ -270,7 +270,7 @@ def flush_refs(self) -> None:
         This will be called at the end of a scope.
         """
         is_func = self.scope_kinds[-1] == FUNCTION
-        for name, refs in self.refs[-1].items():
+        for refs in self.refs[-1].values():
             if len(refs) == 1:
                 # Only one definition -- no renaming needed.
                 continue
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 48be004daf768..58fb07047c4fd 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -4153,7 +4153,7 @@ def check_typevarlike_name(self, call: CallExpr, name: str, context: Context) ->
         if len(call.args) < 1:
             self.fail(f"Too few arguments for {typevarlike_type}()", context)
             return False
-        if not isinstance(call.args[0], StrExpr) or not call.arg_kinds[0] == ARG_POS:
+        if not isinstance(call.args[0], StrExpr) or call.arg_kinds[0] != ARG_POS:
             self.fail(f"{typevarlike_type}() expects a string literal as first argument", context)
             return False
         elif call.args[0].value != name:
@@ -4962,7 +4962,7 @@ def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None:
         """Bind name expression to a symbol table node."""
         if isinstance(sym.node, TypeVarExpr) and self.tvar_scope.get_binding(sym):
             self.fail(
-                '"{}" is a type variable and only valid in type ' "context".format(expr.name), expr
+                '"{}" is a type variable and only valid in type context'.format(expr.name), expr
             )
         elif isinstance(sym.node, PlaceholderNode):
             self.process_placeholder(expr.name, "name", expr)
@@ -6809,13 +6809,13 @@ def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSp
     def parse_dataclass_transform_field_specifiers(self, arg: Expression) -> tuple[str, ...]:
         if not isinstance(arg, TupleExpr):
             self.fail('"field_specifiers" argument must be a tuple literal', arg)
-            return tuple()
+            return ()
 
         names = []
         for specifier in arg.items:
             if not isinstance(specifier, RefExpr):
                 self.fail('"field_specifiers" must only contain identifiers', specifier)
-                return tuple()
+                return ()
             names.append(specifier.fullname)
         return tuple(names)
 
diff --git a/mypy/semanal_enum.py b/mypy/semanal_enum.py
index 528b0519cca1a..21576ab47a846 100644
--- a/mypy/semanal_enum.py
+++ b/mypy/semanal_enum.py
@@ -148,7 +148,7 @@ def parse_enum_call_args(
         Return a tuple of fields, values, was there an error.
         """
         args = call.args
-        if not all([arg_kind in [ARG_POS, ARG_NAMED] for arg_kind in call.arg_kinds]):
+        if not all(arg_kind in [ARG_POS, ARG_NAMED] for arg_kind in call.arg_kinds):
             return self.fail_enum_call_arg(f"Unexpected arguments to {class_name}()", call)
         if len(args) < 2:
             return self.fail_enum_call_arg(f"Too few arguments for {class_name}()", call)
diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py
index bc3c5dd61894a..9a0be9d9c14ce 100644
--- a/mypy/semanal_namedtuple.py
+++ b/mypy/semanal_namedtuple.py
@@ -85,7 +85,7 @@
 )
 
 NAMEDTUP_CLASS_ERROR: Final = (
-    "Invalid statement in NamedTuple definition; " 'expected "field_name: field_type [= default]"'
+    'Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"'
 )
 
 SELF_TVAR_NAME: Final = "_NT"
diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index 13aab4de65e47..67c05fd742735 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -50,7 +50,7 @@
 )
 
 TPDICT_CLASS_ERROR: Final = (
-    "Invalid statement in TypedDict definition; " 'expected "field_name: field_type"'
+    'Invalid statement in TypedDict definition; expected "field_name: field_type"'
 )
 
 
diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py
index 862c3898a3838..174c2922c7676 100644
--- a/mypy/server/astmerge.py
+++ b/mypy/server/astmerge.py
@@ -394,7 +394,7 @@ def process_synthetic_type_info(self, info: TypeInfo) -> None:
         # have bodies in the AST so we need to iterate over their symbol
         # tables separately, unlike normal classes.
         self.process_type_info(info)
-        for name, node in info.names.items():
+        for node in info.names.values():
             if node.node:
                 node.node.accept(self)
 
@@ -549,7 +549,7 @@ def fixup(self, node: SN) -> SN:
 def replace_nodes_in_symbol_table(
     symbols: SymbolTable, replacements: dict[SymbolNode, SymbolNode]
 ) -> None:
-    for name, node in symbols.items():
+    for node in symbols.values():
         if node.node:
             if node.node in replacements:
                 new = replacements[node.node]
diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py
index 86ff6e2bb540b..8c0a4dab696f9 100644
--- a/mypy/stubdoc.py
+++ b/mypy/stubdoc.py
@@ -325,7 +325,7 @@ def args_kwargs(signature: FunctionSig) -> bool:
             return has_arg("*args", signature) and has_arg("**kwargs", signature)
 
         # Move functions with (*args, **kwargs) in their signature to last place.
-        return list(sorted(self.signatures, key=lambda x: 1 if args_kwargs(x) else 0))
+        return sorted(self.signatures, key=lambda x: 1 if args_kwargs(x) else 0)
 
 
 def infer_sig_from_docstring(docstr: str | None, name: str) -> list[FunctionSig] | None:
diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py
index 39288197f4777..55e46fe0ec25c 100755
--- a/mypy/stubgenc.py
+++ b/mypy/stubgenc.py
@@ -41,7 +41,7 @@ class ExternalSignatureGenerator(SignatureGenerator):
     def __init__(
         self, func_sigs: dict[str, str] | None = None, class_sigs: dict[str, str] | None = None
-    ):
+    ) -> None:
         """
         Takes a mapping of function/method names to signatures and class name to class
         signatures (usually corresponds to __init__).
 
@@ -187,7 +187,7 @@ class CFunctionStub:
     Class that mimics a C function in order to provide parseable docstrings.
     """
 
-    def __init__(self, name: str, doc: str, is_abstract: bool = False):
+    def __init__(self, name: str, doc: str, is_abstract: bool = False) -> None:
         self.__name__ = name
         self.__doc__ = doc
         self.__abstractmethod__ = is_abstract
@@ -404,7 +404,7 @@ def generate_module(self) -> None:
             if self.should_reexport(name, obj_module_name, name_is_alias=False):
                 self.import_tracker.reexport(name)
 
-        self.set_defined_names(set([name for name, obj in all_items if not inspect.ismodule(obj)]))
+        self.set_defined_names({name for name, obj in all_items if not inspect.ismodule(obj)})
 
         if self.resort_members:
             functions: list[str] = []
@@ -765,7 +765,7 @@ def generate_class_stub(self, class_name: str, cls: type, output: list[str]) ->
         items = self.get_members(cls)
         if self.resort_members:
             items = sorted(items, key=lambda x: method_name_sort_key(x[0]))
-        names = set(x[0] for x in items)
+        names = {x[0] for x in items}
         methods: list[str] = []
         types: list[str] = []
         static_properties: list[str] = []
diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index c02a3efd8dc01..e7cc24f33d18f 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -303,11 +303,9 @@ def _verify_exported_names(
         # desirable in at least the `names_in_runtime_not_stub` case
         stub_object=MISSING,
         runtime_object=MISSING,
-        stub_desc=(
-            f"Names exported in the stub but not at runtime: " f"{names_in_stub_not_runtime}"
-        ),
+        stub_desc=(f"Names exported in the stub but not at runtime: {names_in_stub_not_runtime}"),
         runtime_desc=(
-            f"Names exported at runtime but not in the stub: " f"{names_in_runtime_not_stub}"
+            f"Names exported at runtime but not in the stub: {names_in_runtime_not_stub}"
         ),
     )
 
@@ -677,7 +675,7 @@ def _verify_arg_default_value(
             runtime_type is not None
             and stub_type is not None
             # Avoid false positives for marker objects
-            and type(runtime_arg.default) != object
+            and type(runtime_arg.default) is not object
             # And ellipsis
             and runtime_arg.default is not ...
             and not is_subtype_helper(runtime_type, stub_type)
@@ -897,7 +895,7 @@ def _verify_signature(
             runtime_arg.kind == inspect.Parameter.POSITIONAL_ONLY
             and not stub_arg.pos_only
             and not stub_arg.variable.name.startswith("__")
-            and not stub_arg.variable.name.strip("_") == "self"
+            and stub_arg.variable.name.strip("_") != "self"
             and not is_dunder(function_name, exclude_special=True)  # noisy for dunder methods
         ):
             yield (
@@ -1812,10 +1810,8 @@ def get_importable_stdlib_modules() -> set[str]:
             # test.* modules do weird things like raising exceptions in __del__ methods,
             # leading to unraisable exceptions being logged to the terminal
             # as a warning at the end of the stubtest run
-            if (
-                submodule_name.endswith(".__main__")
-                or submodule_name.startswith("idlelib.")
-                or submodule_name.startswith("test.")
+            if submodule_name.endswith(".__main__") or submodule_name.startswith(
+                ("idlelib.", "test.")
             ):
                 continue
 
diff --git a/mypy/stubutil.py b/mypy/stubutil.py
index b8d601ed3c6b9..e4a97964c547c 100644
--- a/mypy/stubutil.py
+++ b/mypy/stubutil.py
@@ -558,7 +558,7 @@ def __init__(
         include_private: bool = False,
         export_less: bool = False,
         include_docstrings: bool = False,
-    ):
+    ) -> None:
         # Best known value of __all__.
         self._all_ = _all_
         self._include_private = include_private
diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
index dc34931427ec7..bae4f6e81ad17 100644
--- a/mypy/test/helpers.py
+++ b/mypy/test/helpers.py
@@ -145,7 +145,7 @@ def assert_module_equivalence(name: str, expected: Iterable[str], actual: Iterab
     assert_string_arrays_equal(
         expected_normalized,
         actual_normalized,
-        ("Actual modules ({}) do not match expected modules ({}) " 'for "[{} ...]"').format(
+        ('Actual modules ({}) do not match expected modules ({}) for "[{} ...]"').format(
             ", ".join(actual_normalized), ", ".join(expected_normalized), name
         ),
     )
@@ -156,7 +156,7 @@ def assert_target_equivalence(name: str, expected: list[str], actual: list[str])
     assert_string_arrays_equal(
         expected,
         actual,
-        ("Actual targets ({}) do not match expected targets ({}) " 'for "[{} ...]"').format(
+        ('Actual targets ({}) do not match expected targets ({}) for "[{} ...]"').format(
             ", ".join(actual), ", ".join(expected), name
         ),
     )
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
index 3ad97ced61f2b..5fba6192dcaf3 100644
--- a/mypy/test/testcheck.py
+++ b/mypy/test/testcheck.py
@@ -29,6 +29,7 @@
 except ImportError:
     lxml = None
 
+
 import pytest
 
 # List of files that contain test case descriptions.
@@ -99,9 +100,11 @@ def _filename(_msg: str) -> str:
     def run_case_once(
         self,
         testcase: DataDrivenTestCase,
-        operations: list[FileOperation] = [],
+        operations: list[FileOperation] | None = None,
         incremental_step: int = 0,
     ) -> None:
+        if operations is None:
+            operations = []
         original_program_text = "\n".join(testcase.input)
         module_data = self.parse_module(original_program_text, incremental_step)
 
diff --git a/mypy/test/testformatter.py b/mypy/test/testformatter.py
index f64527e7804ae..9f8bb5d824089 100644
--- a/mypy/test/testformatter.py
+++ b/mypy/test/testformatter.py
@@ -52,14 +52,14 @@ def test_trim_source(self) -> None:
 
     def test_split_words(self) -> None:
         assert split_words("Simple message") == ["Simple", "message"]
-        assert split_words('Message with "Some[Long, Types]"' " in it") == [
+        assert split_words('Message with "Some[Long, Types]" in it') == [
             "Message",
             "with",
             '"Some[Long, Types]"',
             "in",
             "it",
         ]
-        assert split_words('Message with "Some[Long, Types]"' " and [error-code]") == [
+        assert split_words('Message with "Some[Long, Types]" and [error-code]') == [
             "Message",
             "with",
             '"Some[Long, Types]"',
diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index 34b266115166f..72b6f6620f833 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -173,7 +173,7 @@ def run_stubtest(
 
 
 class Case:
-    def __init__(self, stub: str, runtime: str, error: str | None):
+    def __init__(self, stub: str, runtime: str, error: str | None) -> None:
        self.stub = stub
        self.runtime = runtime
        self.error = error
@@ -2226,7 +2226,7 @@ def also_bad(asdf): pass
             options=["--allowlist", allowlist.name, "--generate-allowlist"],
         )
         assert output == (
-            f"note: unused allowlist entry unused.*\n" f"{TEST_MODULE_NAME}.also_bad\n"
+            f"note: unused allowlist entry unused.*\n{TEST_MODULE_NAME}.also_bad\n"
         )
     finally:
         os.unlink(allowlist.name)
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 4d916315bdddd..8a840424f76fc 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -2319,7 +2319,7 @@ def visit_unbound_type(self, t: UnboundType) -> None:
 
         # Special case P.args and P.kwargs for ParamSpecs only.
         if name.endswith("args"):
-            if name.endswith(".args") or name.endswith(".kwargs"):
+            if name.endswith((".args", ".kwargs")):
                 base = ".".join(name.split(".")[:-1])
                 n = self.api.lookup_qualified(base, t)
                 if n is not None and isinstance(n.node, ParamSpecExpr):
diff --git a/mypy/types.py b/mypy/types.py
index fcdb61f9719b7..f02e56a677ae7 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -441,7 +441,7 @@ class TypeGuardedType(Type):
 
     __slots__ = ("type_guard",)
 
-    def __init__(self, type_guard: Type):
+    def __init__(self, type_guard: Type) -> None:
         super().__init__(line=type_guard.line, column=type_guard.column)
         self.type_guard = type_guard
 
diff --git a/mypy/typestate.py b/mypy/typestate.py
index b32fb0ef6df14..c5a5da03eae54 100644
--- a/mypy/typestate.py
+++ b/mypy/typestate.py
@@ -192,7 +192,7 @@ def record_subtype_cache_entry(
             # These are unlikely to match, due to the large space of
             # possible values. Avoid uselessly increasing cache sizes.
             return
-        cache = self._subtype_caches.setdefault(right.type, dict())
+        cache = self._subtype_caches.setdefault(right.type, {})
         cache.setdefault(kind, set()).add((left, right))
 
     def record_negative_subtype_cache_entry(
@@ -204,7 +204,7 @@ def record_negative_subtype_cache_entry(
             return
         if len(self._negative_subtype_caches) > MAX_NEGATIVE_CACHE_TYPES:
             self._negative_subtype_caches.clear()
-        cache = self._negative_subtype_caches.setdefault(right.type, dict())
+        cache = self._negative_subtype_caches.setdefault(right.type, {})
         subcache = cache.setdefault(kind, set())
         if len(subcache) > MAX_NEGATIVE_CACHE_ENTRIES:
             subcache.clear()
diff --git a/mypy/util.py b/mypy/util.py
index be8a22d08a270..fddb1d6e70554 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -449,7 +449,7 @@ def get_unique_redefinition_name(name: str, existing: Container[str]) -> str:
 def check_python_version(program: str) -> None:
     """Report issues with the Python used to run mypy, dmypy, or stubgen"""
     # Check for known bad Python versions.
-    if sys.version_info[:2] < (3, 8):
+    if sys.version_info[:2] < (3, 8):  # noqa: UP036
         sys.exit(
             "Running {name} with Python 3.7 or lower is not supported; "
             "please upgrade to 3.8 or newer".format(name=program)
diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py
index caf2058ea7c40..1a8f0d12a0777 100644
--- a/mypyc/codegen/emitmodule.py
+++ b/mypyc/codegen/emitmodule.py
@@ -993,7 +993,7 @@ def _toposort_visit(name: str) -> None:
         result.append(decl.declaration)
         decl.mark = True
 
-    for name, marked_declaration in marked_declarations.items():
+    for name in marked_declarations.keys():
         _toposort_visit(name)
 
     return result
diff --git a/mypyc/codegen/emitwrapper.py b/mypyc/codegen/emitwrapper.py
index 791e856c274ac..45c6c7a05867f 100644
--- a/mypyc/codegen/emitwrapper.py
+++ b/mypyc/codegen/emitwrapper.py
@@ -145,7 +145,7 @@ def generate_wrapper_function(
     real_args = list(fn.args)
     if fn.sig.num_bitmap_args:
         real_args = real_args[: -fn.sig.num_bitmap_args]
-    if fn.class_name and not fn.decl.kind == FUNC_STATICMETHOD:
+    if fn.class_name and fn.decl.kind != FUNC_STATICMETHOD:
         arg = real_args.pop(0)
         emitter.emit_line(f"PyObject *obj_{arg.name} = self;")
 
@@ -238,7 +238,7 @@ def generate_legacy_wrapper_function(
     real_args = list(fn.args)
     if fn.sig.num_bitmap_args:
         real_args = real_args[: -fn.sig.num_bitmap_args]
-    if fn.class_name and not fn.decl.kind == FUNC_STATICMETHOD:
+    if fn.class_name and fn.decl.kind != FUNC_STATICMETHOD:
         arg = real_args.pop(0)
         emitter.emit_line(f"PyObject *obj_{arg.name} = self;")
 
diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py
index 5e65200481971..29e06439abdd6 100644
--- a/mypyc/irbuild/prepare.py
+++ b/mypyc/irbuild/prepare.py
@@ -139,7 +139,7 @@ def is_from_module(node: SymbolNode, module: MypyFile) -> bool:
 def load_type_map(mapper: Mapper, modules: list[MypyFile], deser_ctx: DeserMaps) -> None:
     """Populate a Mapper with deserialized IR from a list of modules."""
     for module in modules:
-        for name, node in module.names.items():
+        for node in module.names.values():
             if isinstance(node.node, TypeInfo) and is_from_module(node.node, module):
                 ir = deser_ctx.classes[node.node.fullname]
                 mapper.type_to_ir[node.node] = ir
@@ -153,7 +153,7 @@ def load_type_map(mapper: Mapper, modules: list[MypyFile], deser_ctx: DeserMaps)
 
 def get_module_func_defs(module: MypyFile) -> Iterable[FuncDef]:
     """Collect all of the (non-method) functions declared in a module."""
-    for name, node in module.names.items():
+    for node in module.names.values():
         # We need to filter out functions that are imported or
         # aliases.  The best way to do this seems to be by
         # checking that the fullname matches.
@@ -468,7 +468,7 @@ def prepare_non_ext_class_def(
     ir = mapper.type_to_ir[cdef.info]
     info = cdef.info
 
-    for name, node in info.names.items():
+    for node in info.names.values():
         if isinstance(node.node, (FuncDef, Decorator)):
             prepare_method_def(ir, module_name, cdef, mapper, node.node)
         elif isinstance(node.node, OverloadedFuncDef):
diff --git a/mypyc/primitives/registry.py b/mypyc/primitives/registry.py
index aa96b35aec562..11fca7dc2c708 100644
--- a/mypyc/primitives/registry.py
+++ b/mypyc/primitives/registry.py
@@ -93,7 +93,7 @@ def method_op(
     var_arg_type: RType | None = None,
     truncated_type: RType | None = None,
     ordering: list[int] | None = None,
-    extra_int_constants: list[tuple[int, RType]] = [],
+    extra_int_constants: list[tuple[int, RType]] | None = None,
     steals: StealsDescription = False,
     is_borrowed: bool = False,
     priority: int = 1,
@@ -122,6 +122,8 @@ def method_op(
         is_borrowed: if True, returned value is borrowed (no need to decrease refcount)
         priority: if multiple ops match, the one with the highest priority is picked
     """
+    if extra_int_constants is None:
+        extra_int_constants = []
     ops = method_call_ops.setdefault(name, [])
     desc = CFunctionDescription(
         name,
@@ -150,7 +152,7 @@ def function_op(
     var_arg_type: RType | None = None,
     truncated_type: RType | None = None,
     ordering: list[int] | None = None,
-    extra_int_constants: list[tuple[int, RType]] = [],
+    extra_int_constants: list[tuple[int, RType]] | None = None,
     steals: StealsDescription = False,
     is_borrowed: bool = False,
     priority: int = 1,
@@ -165,6 +167,8 @@ def function_op(
         name: full name of the function
         arg_types: positional argument types for which this applies
     """
+    if extra_int_constants is None:
+        extra_int_constants = []
     ops = function_ops.setdefault(name, [])
     desc = CFunctionDescription(
         name,
@@ -193,7 +197,7 @@ def binary_op(
     var_arg_type: RType | None = None,
     truncated_type: RType | None = None,
    ordering: list[int] | None = None,
-    extra_int_constants: list[tuple[int, RType]] = [],
+    extra_int_constants: list[tuple[int, RType]] | None = None,
     steals: StealsDescription = False,
     is_borrowed: bool = False,
     priority: int = 1,
@@ -205,6 +209,8 @@ def binary_op(
     Most arguments are similar to method_op(), but exactly two argument
     types are expected.
     """
+    if extra_int_constants is None:
+        extra_int_constants = []
     ops = binary_ops.setdefault(name, [])
     desc = CFunctionDescription(
         name,
@@ -232,7 +238,7 @@ def custom_op(
     var_arg_type: RType | None = None,
     truncated_type: RType | None = None,
     ordering: list[int] | None = None,
-    extra_int_constants: list[tuple[int, RType]] = [],
+    extra_int_constants: list[tuple[int, RType]] | None = None,
     steals: StealsDescription = False,
     is_borrowed: bool = False,
 ) -> CFunctionDescription:
@@ -240,6 +246,8 @@ def custom_op(
 
     Most arguments are similar to method_op().
     """
+    if extra_int_constants is None:
+        extra_int_constants = []
     return CFunctionDescription(
         "",
         arg_types,
@@ -264,7 +272,7 @@ def unary_op(
     error_kind: int,
     truncated_type: RType | None = None,
     ordering: list[int] | None = None,
-    extra_int_constants: list[tuple[int, RType]] = [],
+    extra_int_constants: list[tuple[int, RType]] | None = None,
     steals: StealsDescription = False,
     is_borrowed: bool = False,
     priority: int = 1,
@@ -276,6 +284,8 @@ def unary_op(
     Most arguments are similar to method_op(), but exactly one argument type
     is expected.
""" + if extra_int_constants is None: + extra_int_constants = [] ops = unary_ops.setdefault(name, []) desc = CFunctionDescription( name, diff --git a/pyproject.toml b/pyproject.toml index c43253fed9825..a98d75ea54606 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,24 +33,26 @@ fix = true select = [ "E", # pycodestyle (error) "F", # pyflakes + "W", # pycodestyle (warning) "B", # flake8-bugbear "I", # isort "RUF100", # Unused noqa comments "PGH004", # blanket noqa comments "UP", # pyupgrade + "C4", "SIM201", "ISC001", "RET501", ] ignore = [ - "B006", # use of mutable defaults in function signatures "B007", # Loop control variable not used within the loop body. "B011", # Don't use assert False "B023", # Function definition does not bind loop variable - "E203", # conflicts with black + "E2", # conflicts with black "E402", # module level import not at top of file "E501", # conflicts with black "E731", # Do not assign a `lambda` expression, use a `def` "E741", # Ambiguous variable name "UP032", # 'f-string always preferable to format' is controversial + "C416", # There are a few cases where it's nice to have names for the dict items ] unfixable = [ diff --git a/test-requirements.in b/test-requirements.in index bab3ece29c02e..2bf8de0aa2f54 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -14,6 +14,6 @@ psutil>=4.0 pytest>=7.4.0 pytest-xdist>=1.34.0 pytest-cov>=2.10.0 -ruff==0.1.0 # must match version in .pre-commit-config.yaml +ruff==0.1.4 # must match version in .pre-commit-config.yaml setuptools>=65.5.1 tomli>=1.1.0 # needed even on py311+ so the self check passes with --python-version 3.7 diff --git a/test-requirements.txt b/test-requirements.txt index 3bb9cf29635fd..57607c1bae57a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -67,7 +67,7 @@ ruamel-yaml==0.17.40 # via pre-commit-hooks ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.1.0 +ruff==0.1.4 # via -r test-requirements.in tomli==2.0.1 # via -r test-requirements.in