diff --git a/mypy/nodes.py b/mypy/nodes.py index 6375d500a8a3..f695ca354d0a 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2,11 +2,15 @@ import os from abc import abstractmethod -from collections import OrderedDict +from collections import OrderedDict, defaultdict from typing import ( - Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional, Callable, Sequence, + Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional, Callable, Sequence ) +MYPY = False +if MYPY: + from typing import DefaultDict + import mypy.strconv from mypy.util import short_type from mypy.visitor import NodeVisitor, StatementVisitor, ExpressionVisitor @@ -194,6 +198,8 @@ class MypyFile(SymbolNode): path = '' # Top-level definitions and statements defs = None # type: List[Statement] + # Type alias dependencies as mapping from target to set of alias full names + alias_deps = None # type: DefaultDict[str, Set[str]] # Is there a UTF-8 BOM at the start? is_bom = False names = None # type: SymbolTable @@ -215,6 +221,7 @@ def __init__(self, self.line = 1 # Dummy line number self.imports = imports self.is_bom = is_bom + self.alias_deps = defaultdict(set) if ignored_lines: self.ignored_lines = ignored_lines else: @@ -797,6 +804,8 @@ class AssignmentStmt(Statement): unanalyzed_type = None # type: Optional[mypy.types.Type] # This indicates usage of PEP 526 type annotation syntax in assignment. new_syntax = False # type: bool + # Does this assignment define a type alias? + is_alias_def = False def __init__(self, lvalues: List[Lvalue], rvalue: Expression, type: 'Optional[mypy.types.Type]' = None, new_syntax: bool = False) -> None: @@ -2341,6 +2350,10 @@ class SymbolTableNode: normalized = False # type: bool # Was this defined by assignment to self attribute? implicit = False # type: bool + # Is this node refers to other node via node aliasing? + # (This is currently used for simple aliases like `A = int` instead of .type_override) + is_aliasing = False # type: bool + alias_name = None # type: Optional[str] def __init__(self, kind: int, diff --git a/mypy/scope.py b/mypy/scope.py new file mode 100644 index 000000000000..0242f06a621d --- /dev/null +++ b/mypy/scope.py @@ -0,0 +1,80 @@ +"""Track current scope to easily calculate the corresponding fine-grained target. + +TODO: Use everywhere where we track targets, including in mypy.errors. +""" + +from typing import List, Optional + +from mypy.nodes import TypeInfo, FuncItem + + +class Scope: + """Track which target we are processing at any given time.""" + + def __init__(self) -> None: + self.module = None # type: Optional[str] + self.classes = [] # type: List[TypeInfo] + self.function = None # type: Optional[FuncItem] + # Number of nested scopes ignored (that don't get their own separate targets) + self.ignored = 0 + + def current_module_id(self) -> str: + assert self.module + return self.module + + def current_target(self) -> str: + """Return the current target (non-class; for a class return enclosing module).""" + assert self.module + target = self.module + if self.function: + if self.classes: + target += '.' + '.'.join(c.name() for c in self.classes) + target += '.' + self.function.name() + return target + + def current_full_target(self) -> str: + """Return the current target (may be a class).""" + assert self.module + target = self.module + if self.classes: + target += '.' + '.'.join(c.name() for c in self.classes) + if self.function: + target += '.' 
+ self.function.name() + return target + + def enter_file(self, prefix: str) -> None: + self.module = prefix + self.classes = [] + self.function = None + self.ignored = 0 + + def enter_function(self, fdef: FuncItem) -> None: + if not self.function: + self.function = fdef + else: + # Nested functions are part of the topmost function target. + self.ignored += 1 + + def enter_class(self, info: TypeInfo) -> None: + """Enter a class target scope.""" + if not self.function: + self.classes.append(info) + else: + # Classes within functions are part of the enclosing function target. + self.ignored += 1 + + def leave(self) -> None: + """Leave the innermost scope (can be any kind of scope).""" + if self.ignored: + # Leave a scope that's included in the enclosing target. + self.ignored -= 1 + elif self.function: + # Function is always the innermost target. + self.function = None + elif self.classes: + # Leave the innermost class. + self.classes.pop() + else: + # Leave module. + assert self.module + self.module = None diff --git a/mypy/semanal.py b/mypy/semanal.py index 444838ede525..b7763c73e07f 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -85,6 +85,7 @@ from mypy import join from mypy.util import get_prefix, correct_relative_import from mypy.semanal_shared import PRIORITY_FALLBACKS +from mypy.scope import Scope T = TypeVar('T') @@ -255,6 +256,7 @@ def __init__(self, # If True, process function definitions. If False, don't. This is used # for processing module top levels in fine-grained incremental mode. self.recurse_into_functions = True + self.scope = Scope() def visit_file(self, file_node: MypyFile, fnam: str, options: Options, patches: List[Tuple[int, Callable[[], None]]]) -> None: @@ -287,8 +289,10 @@ def visit_file(self, file_node: MypyFile, fnam: str, options: Options, v.is_ready = True defs = file_node.defs + self.scope.enter_file(file_node.fullname()) for d in defs: self.accept(d) + self.scope.leave() if self.cur_mod_id == 'builtins': remove_imported_names_from_symtable(self.globals, 'builtins') @@ -305,11 +309,13 @@ def visit_file(self, file_node: MypyFile, fnam: str, options: Options, def refresh_partial(self, node: Union[MypyFile, FuncItem, OverloadedFuncDef]) -> None: """Refresh a stale target in fine-grained incremental mode.""" + self.scope.enter_file(self.cur_mod_id) if isinstance(node, MypyFile): self.refresh_top_level(node) else: self.recurse_into_functions = True self.accept(node) + self.scope.leave() def refresh_top_level(self, file_node: MypyFile) -> None: """Reanalyze a stale module top-level in fine-grained incremental mode.""" @@ -591,15 +597,19 @@ def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) - def analyze_function(self, defn: FuncItem) -> None: is_method = self.is_class_scope() + self.scope.enter_function(defn) with self.tvar_scope_frame(self.tvar_scope.method_frame()): if defn.type: self.check_classvar_in_signature(defn.type) assert isinstance(defn.type, CallableType) # Signature must be analyzed in the surrounding scope so that # class-level imported names and type variables are in scope. 
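
# Illustrative sketch (names are hypothetical; assumes a mypy checkout with this
# patch importable) of the target names the Scope tracker introduced above produces.
# Only functions get their own fine-grained targets: class bodies map to the
# enclosing module target, and nested functions/classes fold into the outermost
# function target via the `ignored` counter.
from mypy.scope import Scope

scope = Scope()
scope.enter_file('pkg.mod')
assert scope.current_target() == 'pkg.mod'
assert scope.current_full_target() == 'pkg.mod'
# With enter_class(info_for_C) for a class C the targets become:
#   current_target()      -> 'pkg.mod'      (class body is still checked as part of the module)
#   current_full_target() -> 'pkg.mod.C'
# and after enter_function(fdef_for_f) for a method f inside C:
#   current_target()      -> 'pkg.mod.C.f'  (the method is its own target)
scope.leave()  # leave the file scope again
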
- defn.type = self.type_analyzer().visit_callable_type(defn.type, nested=False) + analyzer = self.type_analyzer() + defn.type = analyzer.visit_callable_type(defn.type, nested=False) + self.add_type_alias_deps(analyzer.aliases_used) self.check_function_signature(defn) if isinstance(defn, FuncDef): + assert isinstance(defn.type, CallableType) defn.type = set_callable_name(defn.type, defn) for arg in defn.arguments: if arg.initializer: @@ -633,6 +643,7 @@ def analyze_function(self, defn: FuncItem) -> None: self.leave() self.function_stack.pop() + self.scope.leave() def check_classvar_in_signature(self, typ: Type) -> None: if isinstance(typ, Overloaded): @@ -660,10 +671,12 @@ def check_function_signature(self, fdef: FuncItem) -> None: self.fail('Type signature has too many arguments', fdef, blocker=True) def visit_class_def(self, defn: ClassDef) -> None: + self.scope.enter_class(defn.info) with self.analyze_class_body(defn) as should_continue: if should_continue: # Analyze class body. defn.defs.accept(self) + self.scope.leave() @contextmanager def analyze_class_body(self, defn: ClassDef) -> Iterator[bool]: @@ -1679,7 +1692,24 @@ def anal_type(self, t: Type, *, aliasing=aliasing, allow_tuple_literal=allow_tuple_literal, third_pass=third_pass) - return t.accept(a) + typ = t.accept(a) + self.add_type_alias_deps(a.aliases_used) + return typ + + def add_type_alias_deps(self, aliases_used: Iterable[str], + target: Optional[str] = None) -> None: + """Add full names of type aliases on which the current node depends. + + This is used by fine-grained incremental mode to re-check the corresponding nodes. + If `target` is None, then the target node used will be the current scope. + """ + if not aliases_used: + # A basic optimization to avoid adding targets with no dependencies to + # the `alias_deps` dict. + return + if target is None: + target = self.scope.current_target() + self.cur_mod_node.alias_deps[target].update(aliases_used) def visit_assignment_stmt(self, s: AssignmentStmt) -> None: for lval in s.lvalues: @@ -1755,10 +1785,17 @@ def alias_fallback(self, tp: Type) -> Instance: return Instance(fb_info, []) def analyze_alias(self, rvalue: Expression, - warn_bound_tvar: bool = False) -> Tuple[Optional[Type], List[str]]: - """Check if 'rvalue' represents a valid type allowed for aliasing - (e.g. not a type variable). If yes, return the corresponding type and a list of - qualified type variable names for generic aliases. + warn_bound_tvar: bool = False) -> Tuple[Optional[Type], List[str], + Set[str], List[str]]: + """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable). + + If yes, return the corresponding type, a list of + qualified type variable names for generic aliases, a set of names the alias depends on, + and a list of type variables if the alias is generic. 
+ An schematic example for the dependencies: + A = int + B = str + analyze_alias(Dict[A, B])[2] == {'__main__.A', '__main__.B'} """ dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic()) global_scope = not self.type and not self.function_stack @@ -1775,15 +1812,21 @@ def analyze_alias(self, rvalue: Expression, in_dynamic_func=dynamic, global_scope=global_scope, warn_bound_tvar=warn_bound_tvar) + typ = None # type: Optional[Type] if res: - alias_tvars = [name for (name, _) in - res.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope))] + typ, depends_on = res + found_type_vars = typ.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope)) + alias_tvars = [name for (name, node) in found_type_vars] + qualified_tvars = [node.fullname() for (name, node) in found_type_vars] else: alias_tvars = [] - return res, alias_tvars + depends_on = set() + qualified_tvars = [] + return typ, alias_tvars, depends_on, qualified_tvars def check_and_set_up_type_alias(self, s: AssignmentStmt) -> None: """Check if assignment creates a type alias and set it up as needed. + For simple aliases like L = List we use a simpler mechanism, just copying TypeInfo. For subscripted (including generic) aliases the resulting types are stored in rvalue.analyzed. @@ -1809,11 +1852,20 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> None: # annotations (see the second rule). return rvalue = s.rvalue - res, alias_tvars = self.analyze_alias(rvalue, warn_bound_tvar=True) + res, alias_tvars, depends_on, qualified_tvars = self.analyze_alias(rvalue, + warn_bound_tvar=True) if not res: return + s.is_alias_def = True node = self.lookup(lvalue.name, lvalue) assert node is not None + if lvalue.fullname is not None: + node.alias_name = lvalue.fullname + self.add_type_alias_deps(depends_on) + self.add_type_alias_deps(qualified_tvars) + # The above are only direct deps on other aliases. + # For subscripted aliases, type deps from expansion are added in deps.py + # (because the type is stored) if not lvalue.is_inferred_def: # Type aliases can't be re-defined. if node and (node.kind == TYPE_ALIAS or isinstance(node.node, TypeInfo)): @@ -1830,7 +1882,14 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> None: # For simple (on-generic) aliases we use aliasing TypeInfo's # to allow using them in runtime context where it makes sense. node.node = res.type + node.is_aliasing = True if isinstance(rvalue, RefExpr): + # For non-subscripted aliases we add type deps right here + # (because the node is stored, not type) + # TODO: currently subscripted and unsubscripted aliases are processed differently + # This leads to duplication of most of the logic with small variations. + # Fix this. + self.add_type_alias_deps({node.node.fullname()}) sym = self.lookup_type_node(rvalue) if sym: node.normalized = sym.normalized @@ -3439,12 +3498,15 @@ def visit_index_expr(self, expr: IndexExpr) -> None: elif isinstance(expr.base, RefExpr) and expr.base.kind == TYPE_ALIAS: # Special form -- subscripting a generic type alias. # Perform the type substitution and create a new alias. 
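
# Example (hypothetical names) of the situation handled here: a generic type alias
# used in a runtime position. The subscripted use L[int] is stored as expr.analyzed
# (a TypeAliasExpr), so without the extra bookkeeping below the dependency of this
# target on the aliases themselves would be lost.
from typing import Dict, TypeVar

T = TypeVar('T')
LongGeneric = Dict[str, T]   # generic type alias
L = LongGeneric              # simple alias to a generic alias
x = L[int]()                 # runtime subscription; this target must depend on both aliases
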
- res, alias_tvars = self.analyze_alias(expr) + res, alias_tvars, depends_on, _ = self.analyze_alias(expr) assert res is not None, "Failed analyzing already defined alias" expr.analyzed = TypeAliasExpr(res, alias_tvars, fallback=self.alias_fallback(res), in_runtime=True) expr.analyzed.line = expr.line expr.analyzed.column = expr.column + # We also store fine-grained dependencies to correctly re-process nodes + # with situations like `L = LongGeneric; x = L[int]()`. + self.add_type_alias_deps(depends_on) elif refers_to_class_or_function(expr.base): # Special form -- type application. # Translate index to an unanalyzed type. diff --git a/mypy/semanal_pass3.py b/mypy/semanal_pass3.py index 01c5a6627deb..783c2754a9d2 100644 --- a/mypy/semanal_pass3.py +++ b/mypy/semanal_pass3.py @@ -10,7 +10,8 @@ """ from collections import OrderedDict -from typing import Dict, List, Callable, Optional, Union, Set, cast, Tuple +from contextlib import contextmanager +from typing import Dict, List, Callable, Optional, Union, Set, cast, Tuple, Iterator from mypy import messages, experiments from mypy.nodes import ( @@ -31,6 +32,7 @@ from mypy.semanal_shared import PRIORITY_FORWARD_REF, PRIORITY_TYPEVAR_VALUES from mypy.subtypes import is_subtype from mypy.sametypes import is_same_type +from mypy.scope import Scope import mypy.semanal @@ -46,6 +48,7 @@ def __init__(self, modules: Dict[str, MypyFile], errors: Errors, self.modules = modules self.errors = errors self.sem = sem + self.scope = Scope() # If True, process function definitions. If False, don't. This is used # for processing module top levels in fine-grained incremental mode. self.recurse_into_functions = True @@ -59,18 +62,23 @@ def visit_file(self, file_node: MypyFile, fnam: str, options: Options, self.patches = patches self.is_typeshed_file = self.errors.is_typeshed_file(fnam) self.sem.cur_mod_id = file_node.fullname() + self.cur_mod_node = file_node self.sem.globals = file_node.names with experiments.strict_optional_set(options.strict_optional): + self.scope.enter_file(file_node.fullname()) self.accept(file_node) + self.scope.leave() def refresh_partial(self, node: Union[MypyFile, FuncItem, OverloadedFuncDef]) -> None: """Refresh a stale target in fine-grained incremental mode.""" + self.scope.enter_file(self.sem.cur_mod_id) if isinstance(node, MypyFile): self.recurse_into_functions = False self.refresh_top_level(node) else: self.recurse_into_functions = True self.accept(node) + self.scope.leave() def refresh_top_level(self, file_node: MypyFile) -> None: """Reanalyze a stale module top-level in fine-grained incremental mode.""" @@ -91,10 +99,12 @@ def visit_block(self, b: Block) -> None: def visit_func_def(self, fdef: FuncDef) -> None: if not self.recurse_into_functions: return + self.scope.enter_function(fdef) self.errors.push_function(fdef.name()) self.analyze(fdef.type, fdef) super().visit_func_def(fdef) self.errors.pop_function() + self.scope.leave() def visit_overloaded_func_def(self, fdef: OverloadedFuncDef) -> None: if not self.recurse_into_functions: @@ -105,6 +115,7 @@ def visit_overloaded_func_def(self, fdef: OverloadedFuncDef) -> None: def visit_class_def(self, tdef: ClassDef) -> None: # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to # check them again here. 
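
# Sketch of the alias_deps mapping that the semantic analyzer passes populate
# (module and alias names here are hypothetical). Keys are fine-grained targets,
# values are full names of the aliases each target uses; deps.py consumes this
# mapping further below.
from collections import defaultdict
from typing import DefaultDict, Set

alias_deps = defaultdict(set)  # type: DefaultDict[str, Set[str]]
# For a module 'm' containing:
#     A = int
#     B = A                     # the module target depends on the alias A
#     def f(x: B) -> None: ...  # the function target depends on the alias B
# the analyzer records roughly:
alias_deps['m'].add('m.A')
alias_deps['m.f'].add('m.B')
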
+ self.scope.enter_class(tdef.info) if not tdef.info.is_named_tuple: types = list(tdef.info.bases) # type: List[Type] for tvar in tdef.type_vars: @@ -135,6 +146,7 @@ def visit_class_def(self, tdef: ClassDef) -> None: self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True) self.analyze_info(tdef.analyzed.info) super().visit_class_def(tdef) + self.scope.leave() def visit_decorator(self, dec: Decorator) -> None: """Try to infer the type of the decorated function. @@ -353,6 +365,9 @@ def analyze(self, type: Optional[Type], node: Union[Node, SymbolTableNode], type.accept(analyzer) self.check_for_omitted_generics(type) self.generate_type_patches(node, indicator, warn) + if analyzer.aliases_used: + target = self.scope.current_target() + self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used) def analyze_types(self, types: List[Type], node: Node) -> None: # Similar to above but for nodes with multiple types. @@ -361,6 +376,9 @@ def analyze_types(self, types: List[Type], node: Node) -> None: analyzer = self.make_type_analyzer(indicator) type.accept(analyzer) self.check_for_omitted_generics(type) + if analyzer.aliases_used: + target = self.scope.current_target() + self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used) self.generate_type_patches(node, indicator, warn=False) def generate_type_patches(self, diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index e2c2055cdbf2..d9fdf7b2da8d 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -43,7 +43,7 @@ from mypy.nodes import ( Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt, ImportFrom, Import, TypeInfo, SymbolTable, Var, CallExpr, Decorator, OverloadedFuncDef, - SuperExpr, UNBOUND_IMPORTED, GDEF, MDEF + SuperExpr, UNBOUND_IMPORTED, GDEF, MDEF, IndexExpr ) from mypy.traverser import TraverserVisitor @@ -204,6 +204,10 @@ def visit_member_expr(self, node: MemberExpr) -> None: node.def_var = None super().visit_member_expr(node) + def visit_index_expr(self, node: IndexExpr) -> None: + node.analyzed = None # was a type alias + super().visit_index_expr(node) + def strip_class_attr(self, name: str) -> None: if self.type is not None: del self.type.names[name] diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 2c1bdccc291c..c349793a7128 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -81,6 +81,10 @@ class 'mod.Cls'. This can also refer to an attribute inherited from a from typing import Dict, List, Set, Optional, Tuple, Union +MYPY = False +if MYPY: + from typing import DefaultDict + from mypy.checkmember import bind_self from mypy.nodes import ( Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import, @@ -88,7 +92,7 @@ class 'mod.Cls'. This can also refer to an attribute inherited from a ComparisonExpr, GeneratorExpr, DictionaryComprehension, StarExpr, PrintStmt, ForStmt, WithStmt, TupleExpr, ListExpr, OperatorAssignmentStmt, DelStmt, YieldFromExpr, Decorator, Block, TypeInfo, FuncBase, OverloadedFuncDef, RefExpr, SuperExpr, Var, NamedTupleExpr, TypedDictExpr, - LDEF, MDEF, GDEF, + LDEF, MDEF, GDEF, FuncItem, TypeAliasExpr, op_methods, reverse_op_methods, ops_with_inplace_method, unary_op_methods ) from mypy.traverser import TraverserVisitor @@ -99,24 +103,26 @@ class 'mod.Cls'. 
This can also refer to an attribute inherited from a ) from mypy.server.trigger import make_trigger from mypy.util import correct_relative_import +from mypy.scope import Scope def get_dependencies(target: MypyFile, type_map: Dict[Expression, Type], python_version: Tuple[int, int]) -> Dict[str, Set[str]]: """Get all dependencies of a node, recursively.""" - visitor = DependencyVisitor(type_map, python_version) + visitor = DependencyVisitor(type_map, python_version, target.alias_deps) target.accept(visitor) return visitor.map def get_dependencies_of_target(module_id: str, + module_tree: MypyFile, target: Node, type_map: Dict[Expression, Type], python_version: Tuple[int, int]) -> Dict[str, Set[str]]: """Get dependencies of a target -- don't recursive into nested targets.""" # TODO: Add tests for this function. - visitor = DependencyVisitor(type_map, python_version) + visitor = DependencyVisitor(type_map, python_version, module_tree.alias_deps) visitor.scope.enter_file(module_id) if isinstance(target, MypyFile): # Only get dependencies of the top-level of the module. Don't recurse into @@ -140,10 +146,21 @@ def get_dependencies_of_target(module_id: str, class DependencyVisitor(TraverserVisitor): def __init__(self, type_map: Dict[Expression, Type], - python_version: Tuple[int, int]) -> None: + python_version: Tuple[int, int], + alias_deps: 'DefaultDict[str, Set[str]]') -> None: self.scope = Scope() self.type_map = type_map self.python2 = python_version[0] == 2 + # This attribute holds a mapping from target to names of type aliases + # it depends on. These need to be processed specially, since they are + # only present in expanded form in symbol tables. For example, after: + # A = List[int] + # x: A + # The module symbol table will just have a Var `x` with type `List[int]`, + # and the dependency of `x` on `A` is lost. Therefore the alias dependencies + # are preserved at alias expansion points in `semanal.py`, stored as an attribute + # on MypyFile, and then passed here. + self.alias_deps = alias_deps self.map = {} # type: Dict[str, Set[str]] self.is_class = False self.is_package_init_file = False @@ -153,13 +170,13 @@ def __init__(self, # await # protocols # metaclasses - # type aliases # functional enum # type variable with value restriction def visit_mypy_file(self, o: MypyFile) -> None: self.scope.enter_file(o.fullname()) self.is_package_init_file = o.is_package_init_file() + self.add_type_alias_deps(self.scope.current_target()) super().visit_mypy_file(o) self.scope.leave() @@ -177,6 +194,7 @@ def visit_func_def(self, o: FuncDef) -> None: if o.info: for base in non_trivial_bases(o.info): self.add_dependency(make_trigger(base.fullname() + '.' + o.name())) + self.add_type_alias_deps(self.scope.current_target()) super().visit_func_def(o) self.scope.leave() @@ -202,6 +220,7 @@ def visit_class_def(self, o: ClassDef) -> None: self.add_type_dependencies(o.info.typeddict_type, target=make_trigger(target)) # TODO: Add dependencies based on remaining TypeInfo attributes. 
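
# Minimal sketch of what the add_type_alias_deps() helper (defined further below in
# this file) does with the entries collected during semantic analysis: each alias
# full name is wrapped into a trigger and the current target is registered under it
# (target/alias names here are hypothetical).
from typing import Dict, Set
from mypy.server.trigger import make_trigger

alias_deps = {'m': {'m.A'}, 'm.f': {'m.B'}}
deps = {}  # type: Dict[str, Set[str]]
for target, aliases in alias_deps.items():
    for alias in aliases:
        deps.setdefault(make_trigger(alias), set()).add(target)
assert deps == {'<m.A>': {'m'}, '<m.B>': {'m.f'}}
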
super().visit_class_def(o) + self.add_type_alias_deps(self.scope.current_target()) self.is_class = old_is_class info = o.info for name, node in info.names.items(): @@ -239,7 +258,6 @@ def visit_block(self, o: Block) -> None: def visit_assignment_stmt(self, o: AssignmentStmt) -> None: # TODO: Implement all assignment special forms, including these: # Enum - # type aliases rvalue = o.rvalue if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr): # TODO: Support type variable value restriction @@ -264,6 +282,17 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: assert info.typeddict_type is not None prefix = '%s.%s' % (self.scope.current_full_target(), info.name()) self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix)) + elif o.is_alias_def: + assert len(o.lvalues) == 1 + lvalue = o.lvalues[0] + assert isinstance(lvalue, NameExpr) + # TODO: get rid of this extra dependency from __init__ to alias definition scope + typ = self.type_map.get(lvalue) + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + class_name = typ.type_object().fullname() + self.add_dependency(make_trigger(class_name + '.__init__')) + if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr): + self.add_type_dependencies(rvalue.analyzed.type) else: # Normal assignment super().visit_assignment_stmt(o) @@ -506,6 +535,13 @@ def visit_yield_from_expr(self, e: YieldFromExpr) -> None: # Helpers + def add_type_alias_deps(self, target: str) -> None: + # Type aliases are special, because some of the dependencies are calculated + # in semanal.py, before they are expanded. + if target in self.alias_deps: + for alias in self.alias_deps[target]: + self.add_dependency(make_trigger(alias)) + def add_dependency(self, trigger: str, target: Optional[str] = None) -> None: """Add dependency from trigger to a target. @@ -573,78 +609,6 @@ def add_iter_dependency(self, node: Expression) -> None: self.add_attribute_dependency(typ, '__iter__') -class Scope: - """Track which target we are processing at any given time.""" - - def __init__(self) -> None: - self.module = None # type: Optional[str] - self.classes = [] # type: List[TypeInfo] - self.function = None # type: Optional[FuncDef] - # Number of nested scopes ignored (that don't get their own separate targets) - self.ignored = 0 - - def current_module_id(self) -> str: - assert self.module - return self.module - - def current_target(self) -> str: - """Return the current target (non-class; for a class return enclosing module).""" - assert self.module - target = self.module - if self.function: - if self.classes: - target += '.' + '.'.join(c.name() for c in self.classes) - target += '.' + self.function.name() - return target - - def current_full_target(self) -> str: - """Return the current target (may be a class).""" - assert self.module - target = self.module - if self.classes: - target += '.' + '.'.join(c.name() for c in self.classes) - if self.function: - target += '.' + self.function.name() - return target - - def enter_file(self, prefix: str) -> None: - self.module = prefix - self.classes = [] - self.function = None - self.ignored = 0 - - def enter_function(self, fdef: FuncDef) -> None: - if not self.function: - self.function = fdef - else: - # Nested functions are part of the topmost function target. 
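
# Example (hypothetical modules) of why visit_assignment_stmt above adds the extra
# <...__init__> dependency for an alias bound to a class: calls that construct
# instances through the alias must be re-checked when the class constructor changes.
#
#   a.py:  class A:
#              def __init__(self, x: int) -> None: ...
#   b.py:  import a
#          B = a.A          # alias definition; its target gets a dep on <a.A.__init__>
#   c.py:  from b import B
#          B(0)             # re-checked when A.__init__ changes
#
# (see testAliasFineInitNormalMod in the fine-grained tests below)
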
- self.ignored += 1 - - def enter_class(self, info: TypeInfo) -> None: - """Enter a class target scope.""" - if not self.function: - self.classes.append(info) - else: - # Classes within functions are part of the enclosing function target. - self.ignored += 1 - - def leave(self) -> None: - """Leave the innermost scope (can be any kind of scope).""" - if self.ignored: - # Leave a scope that's included in the enclosing target. - self.ignored -= 1 - elif self.function: - # Function is always the innermost target. - self.function = None - elif self.classes: - # Leave the innermost class. - self.classes.pop() - else: - # Leave module. - assert self.module - self.module = None - - def get_type_triggers(typ: Type) -> List[str]: """Return all triggers that correspond to a type becoming stale.""" return typ.accept(TypeTriggersVisitor()) diff --git a/mypy/server/update.py b/mypy/server/update.py index bf0cf0499ddd..d8408c6c09a9 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -919,7 +919,10 @@ def update_deps(module_id: str, for deferred in nodes: node = deferred.node type_map = graph[module_id].type_map() - new_deps = get_dependencies_of_target(module_id, node, type_map, options.python_version) + tree = graph[module_id].tree + assert tree is not None, "Tree must be processed at this stage" + new_deps = get_dependencies_of_target(module_id, tree, node, type_map, + options.python_version) for trigger, targets in new_deps.items(): deps.setdefault(trigger, set()).update(targets) diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py index 63d27b29471a..84c5ec2f66c9 100644 --- a/mypy/test/testdeps.py +++ b/mypy/test/testdeps.py @@ -1,7 +1,11 @@ """Test cases for generating node-level dependencies (for fine-grained incremental checking)""" import os -from typing import List, Tuple, Dict, Optional +from typing import List, Tuple, Dict, Optional, Set +MYPY = False +if MYPY: + from typing import DefaultDict +from collections import defaultdict from mypy import build, defaults from mypy.build import BuildSource @@ -33,6 +37,7 @@ class GetDependenciesSuite(DataSuite): def run_case(self, testcase: DataDrivenTestCase) -> None: src = '\n'.join(testcase.input) + dump_all = '# __dump_all__' in src if testcase.name.endswith('python2'): python_version = defaults.PYTHON2_VERSION else: @@ -43,11 +48,13 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: if not a: a = ['Unknown compile error (likely syntax error in test case or fixture)'] else: - deps = {} - for module in dumped_modules: - if module in files: + deps = defaultdict(set) # type: DefaultDict[str, Set[str]] + for module in files: + if module in dumped_modules or dump_all and module not in ('abc', 'typing', + 'mypy_extensions'): new_deps = get_dependencies(files[module], type_map, python_version) - deps.update(new_deps) + for source in new_deps: + deps[source].update(new_deps[source]) for source, targets in sorted(deps.items()): line = '%s -> %s' % (source, ', '.join(sorted(targets))) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 2eb5366d1639..337854d05709 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -2,6 +2,7 @@ from collections import OrderedDict from typing import Callable, List, Optional, Set, Tuple, Iterator, TypeVar, Iterable, Dict, Union + from itertools import chain from contextlib import contextmanager @@ -63,9 +64,11 @@ def analyze_type_alias(node: Expression, allow_unnormalized: bool = False, in_dynamic_func: bool = False, global_scope: bool = True, - warn_bound_tvar: bool = False) -> 
Optional[Type]: - """Return type if node is valid as a type alias rvalue. + warn_bound_tvar: bool = False) -> Optional[Tuple[Type, Set[str]]]: + """Analyze r.h.s. of a (potential) type alias definition. + If `node` is valid as a type alias rvalue, return the resulting type and a set of + full names of type aliases it depends on (directly or indirectly). Return None otherwise. 'node' must have been semantically analyzed. """ # Quickly return None if the expression doesn't look like a type. Note @@ -103,7 +106,7 @@ def analyze_type_alias(node: Expression, arg = lookup_func(node.args[0].name, node.args[0]) if (call is not None and call.node and call.node.fullname() == 'builtins.type' and arg is not None and arg.node and arg.node.fullname() == 'builtins.None'): - return NoneTyp() + return NoneTyp(), set() return None return None else: @@ -120,7 +123,8 @@ def analyze_type_alias(node: Expression, allow_unnormalized=allow_unnormalized, warn_bound_tvar=warn_bound_tvar) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope - return type.accept(analyzer) + res = type.accept(analyzer) + return res, analyzer.aliases_used def no_subscript_builtin_alias(name: str, propose_alt: bool = True) -> str: @@ -171,6 +175,8 @@ def __init__(self, self.is_typeshed_stub = is_typeshed_stub self.warn_bound_tvar = warn_bound_tvar self.third_pass = third_pass + # Names of type aliases encountered while analysing a type will be collected here. + self.aliases_used = set() # type: Set[str] def visit_unbound_type(self, t: UnboundType) -> Type: if t.optional: @@ -259,6 +265,8 @@ def visit_unbound_type(self, t: UnboundType) -> Type: elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'): return UninhabitedType(is_noreturn=True) elif sym.kind == TYPE_ALIAS: + if sym.alias_name is not None: + self.aliases_used.add(sym.alias_name) override = sym.type_override all_vars = sym.alias_tvars assert override is not None @@ -307,6 +315,9 @@ def visit_unbound_type(self, t: UnboundType) -> Type: self.note_func("Forward references to type variables are prohibited", t) return t info = sym.node # type: TypeInfo + if sym.is_aliasing: + if sym.alias_name is not None: + self.aliases_used.add(sym.alias_name) if len(t.args) > 0 and info.fullname() == 'builtins.tuple': fallback = Instance(info, [AnyType(TypeOfAny.special_form)], t.line) return TupleType(self.anal_array(t.args), fallback, t.line) @@ -666,6 +677,7 @@ def __init__(self, self.is_typeshed_stub = is_typeshed_stub self.indicator = indicator self.patches = patches + self.aliases_used = set() # type: Set[str] def visit_instance(self, t: Instance) -> None: info = t.type @@ -796,7 +808,9 @@ def anal_type(self, tp: UnboundType) -> Type: self.options, self.is_typeshed_stub, third_pass=True) - return tp.accept(tpan) + res = tp.accept(tpan) + self.aliases_used = tpan.aliases_used + return res TypeVarList = List[Tuple[str, TypeVarExpr]] diff --git a/test-data/unit/deps-types.test b/test-data/unit/deps-types.test index 4334bff6cbfe..4361defa3960 100644 --- a/test-data/unit/deps-types.test +++ b/test-data/unit/deps-types.test @@ -151,3 +151,552 @@ def h() -> None: -> m, m.h -> m, m.h -> m.f + +-- Type aliases + +[case testAliasDepsNormalMod] +from mod import I +A = I +x: A +[file mod.py] +class I: pass +[out] + -> m + -> m + -> m + -> , m + +[case testAliasDepsNormalModExtended] +# __dump_all__ +import a +x: a.A +[file a.py] +from mod import I +A = I +[file mod.py] +class I: pass +[out] + -> m + -> m + -> m + -> a + -> , m, a, mod.I + +[case 
testAliasDepsNormalFunc] +from mod import I +A = I +def f(x: A) -> None: + pass +[file mod.py] +class I: pass +[out] + -> m.f + -> m + -> , m, m.f + +[case testAliasDepsNormalFuncExtended] +# __dump_all__ +import a +def f(x: a.A) -> None: + pass +[file a.py] +from mod import I +A = I +[file mod.py] +class I: pass +[out] + -> m.f + -> m + -> a + -> , m.f, a, mod.I + +[case testAliasDepsNormalClass] +from a import A +class C: + x: A +[file a.py] +from mod import I +A = I +[file mod.py] +class I: pass +[out] + -> m.C + -> m + -> , m + +[case testAliasDepsNormalClassBases] +from a import A +class C(A): + pass +[file a.py] +from mod import I +A = I +[file mod.py] +class I: pass +[out] + -> m.C + -> m + -> + -> m, m.C + +[case testAliasDepsGenericMod] +from mod import I, S, D +A = D[I, S] +x: A +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m + -> m + -> , m + -> , m + -> , m + +[case testAliasDepsGenericFunc] +from mod import I, S, D +A = D[S, I] +def f(x: A) -> None: + pass +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m.f + -> m + -> , m, m.f + -> , m, m.f + -> , m, m.f + +[case testAliasDepsGenericFuncExtended] +import a +def f(x: a.A) -> None: + pass +[file a.py] +from mod import I, S, D +A = D[S, I] +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m.f + -> m + -> , m.f + -> , m.f + -> , m.f + +[case testAliasDepsGenericClass] +from mod import I, D, S, T +A = D[S, T] +class C: + x: A[I] +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m.C + -> m + -> , m + -> , m + -> , m + -> m + +[case testAliasDepsForwardMod] +from mod import I +x: A +A = I +[file mod.py] +from typing import TypeVar, Generic +class I: pass +[out] + -> m + -> m + -> m + -> , m + +[case testAliasDepsForwardFunc] +from mod import I +def f(x: A) -> None: + pass +A = I +[file mod.py] +class I: pass +[out] + -> m.f + -> m + -> , m, m.f + +[case testAliasDepsForwardClass] +from mod import I +class C: + x: A +A = I +[file mod.py] +class I: pass +[out] + -> m + -> m.C + -> m + -> , m + +[case testAliasDepsChainedMod] +from mod import I +A = I +B = A +x: B +[file mod.py] +class I: pass +[out] + -> m + -> m + -> m + -> m + -> , m + +[case testAliasDepsChainedFunc] +from mod import I +A = I +B = A +def f(x: B) -> None: + pass +[file mod.py] +class I: pass +[out] + -> m + -> m.f + -> m + -> , m, m.f + +[case testAliasDepsChainedFuncExtended] +import a +B = a.A +def f(x: B) -> None: + pass +[file a.py] +from mod import I +A = I +[file mod.py] +class I: pass +[out] + -> m.f + -> m + -> m + -> m + -> , m, m.f + +[case testAliasDepsChainedClass] +from mod import I +A = I +B = A +class C(B): + pass +[file mod.py] +class I: pass +[out] + -> m + -> m + -> m.C + -> , m + -> m, m.C + +[case testAliasDepsNestedMod] +from mod import I, S, D +A = D[S, I] +B = D[S, A] +x: B +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m + -> m + -> m + -> , m + -> , m + -> , m + +[case testAliasDepsNestedModExtended] +# __dump_all__ +from mod import S, D +import a +B = D[S, a.A] +x: B 
+[file a.py] +from mod import I, S, D +A = D[S, I] +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m + -> m + -> m + -> m, a + -> , m, a, mod.D + -> , m, a, mod.I + -> , m, a, mod.S + -> mod.D + -> mod.D + +[case testAliasDepsNestedFunc] +from mod import I, S, D +A = D[S, I] +B = D[S, A] +def f(x: B) -> None: + pass +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m.f + -> m + -> , m, m.f + -> , m, m.f + -> , m, m.f + +[case testAliasDepsNestedFuncExtended] +# __dump_all__ +from mod import S, D +import a +B = D[S, a.A] +def f(x: B) -> None: + pass +[file a.py] +from mod import I, S, D +A = D[S, I] +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m.f + -> m + -> m + -> m, a + -> , m, m.f, a, mod.D + -> , m, m.f, a, mod.I + -> , m, m.f, a, mod.S + -> mod.D + -> mod.D + +[case testAliasDepsNestedFuncDirect] +from mod import I, S, D +A = D[S, I] +E = D +def f(x: E[S, A]) -> None: + pass +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m.f + -> m.f + -> m + -> , m, m.f + -> , m, m.f + -> , m, m.f + +[case testAliasDepsNestedClass] +from mod import I, S, D +A = D[S, I] +B = D[S, A] +class C: + x: B +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m + -> m.C + -> m + -> , m + -> , m + -> , m + +[case testAliasDepsCast] +from typing import cast +from mod import I +A = I +def fun() -> None: + x = cast(A, 42) +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m.fun + -> m + -> m, m.fun + +[case testAliasDepsRuntime] +from mod import I, S, D +A = I +x = D[S, A]() +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m + -> m + -> , m + -> m + -> , m + -> , m + +[case testAliasDepsRuntimeExtended] +# __dump_all__ +from mod import I, S, D +import a +x = D[S, a.A]() +[file a.py] +from mod import I +A = I +[file mod.py] +from typing import TypeVar, Generic +T = TypeVar('T') +U = TypeVar('U') +class D(Generic[T, U]): pass +class I: pass +class S: pass +[out] + -> m + -> m + -> m + -> m + -> , m, mod.D + -> a + -> , m, a, mod.I + -> , m, mod.S + -> mod.D + -> mod.D + +[case testAliasDepsNamedTuple] +from typing import NamedTuple +from mod import I +A = I +class P(NamedTuple): + x: A +[file mod.py] +class I: pass +[out] + -> m + -> m.P + -> m + -> , , m, m.P + +[case testAliasDepsNamedTupleFunctional] +# __dump_all__ +from typing import NamedTuple +import a +P = NamedTuple('P', [('x', a.A)]) +[file a.py] +from mod import I +A = I +[file mod.py] +class I: pass +[out] + -> m + -> m + -> a + -> , , m, a, mod.I + +[case testAliasDepsTypedDict] +from mypy_extensions import TypedDict +from mod import I +A = I +class P(TypedDict): + x: A +[file mod.py] +class I: pass +[builtins fixtures/dict.pyi] +[out] + -> m + -> m.P + -> m + -> , m, m.P + -> m + +[case testAliasDepsTypedDictFunctional] +# __dump_all__ +from mypy_extensions 
import TypedDict +import a +P = TypedDict('P', {'x': a.A}) +[file a.py] +from mod import I +A = I +[file mod.py] +class I: pass +[builtins fixtures/dict.pyi] +[out] + -> m + -> m + -> a + -> , a, mod.I + -> m + +[case testAliasDepsClassInFunction] +from mod import I +A = I +def f() -> None: + class C: + x: A +[file mod.py] +class I: pass +[out] + -> m.f + -> m.f + -> m + -> , m, m.f diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index 5e5d81294de9..841b5648c077 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -618,3 +618,65 @@ p = Point(dict(x=42, y=1337)) [out] __main__.Point __main__.p + +[case testTypeAliasSimple] +A = int +B = int +[file next.py] +A = str +B = int +[out] +__main__.A + +[case testTypeAliasGeneric] +from typing import List +A = List[int] +B = List[int] +[file next.py] +from typing import List +A = List[str] +B = List[int] +[builtins fixtures/list.pyi] +[out] +__main__.A + +[case testTypeAliasGenToNonGen] +from typing import List +A = List[str] +B = List +[file next.py] +from typing import List +A = List +B = List +[builtins fixtures/list.pyi] +[out] +__main__.A + +[case testTypeAliasNonGenToGen] +from typing import List +A = List +B = List +[file next.py] +from typing import List +A = List[str] +B = List +[builtins fixtures/list.pyi] +[out] +__main__.A + +[case testTypeAliasGenericTypeVar] +from typing import TypeVar, Dict +T = TypeVar('T') +S = TypeVar('S') +A = Dict[str, T] +B = Dict[str, S] +[file next.py] +from typing import TypeVar, Dict +class T: pass +S = TypeVar('S') +A = Dict[str, T] +B = Dict[str, S] +[builtins fixtures/dict.pyi] +[out] +__main__.A +__main__.T diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 59ddaf3a6b47..e1597e9859ef 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -1797,6 +1797,564 @@ def foo(x: Point) -> int: == b.py:3: error: Unsupported operand types for + ("int" and "str") +[case testBasicAliasUpdate] +import b +[file a.py] +N = int +x = 1 +[file a.py.2] +N = str +x = 'hi' +[file b.py] +import a +def f(x: a.N) -> None: + pass +f(a.x) +[out] +== + +[case testBasicAliasUpdateGeneric] +import b +[file a.py] +from typing import Dict, TypeVar +T = TypeVar('T') +D = Dict[int, T] +x = {1: 1} +[file a.py.2] +from typing import Dict, TypeVar +T = TypeVar('T') +D = Dict[str, T] +x = {'hi': 1} +[file b.py] +import a +def f(x: a.D[int]) -> None: + pass +f(a.x) +[builtins fixtures/dict.pyi] +[out] +== + +[case testAliasFineNormalMod] +import b +[file a.py] +A = int +[file a.py.2] +A = str +[file b.py] +import a +x: a.A = int() +[out] +== +b.py:2: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineNormalFunc] +import b +[file a.py] +A = int +[file a.py.2] +A = str +[file b.py] +import a +def f(x: a.A): + x = int() +[out] +== +b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineNormalClass] +import b +[file a.py] +A = int +[file a.py.2] +A = str +[file b.py] +import a +class C: + x: a.A +c = C() +c.x = int() +[out] +== +b.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineNormalClassBases] +import b +[file a.py] +import c +A = c.BaseI +[file a.py.2] +import c +A = c.BaseS +[file b.py] +import a +class C(a.A): + x = int() +[file c.py] +class BaseI: + x: int +class BaseS: + x: str +[out] +== +b.py:3: error: Incompatible types in 
assignment (expression has type "int", base class "BaseS" defined the type as "str") + +[case testAliasFineGenericMod] +import b +[file a.py] +from typing import Dict +A = Dict[str, int] +[file a.py.2] +from typing import Dict +A = Dict[str, str] +[file b.py] +import a +x: a.A = {str(): int()} +[builtins fixtures/dict.pyi] +[out] +== +b.py:2: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" + +[case testAliasFineGenericFunc] +import b +[file a.py] +from typing import Dict +A = Dict[str, int] +[file a.py.2] +from typing import Dict +A = Dict[str, str] +[file b.py] +import a +def f(x: a.A): + pass +f({str(): int()}) +[builtins fixtures/dict.pyi] +[out] +== +b.py:4: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" + +[case testAliasFineForwardMod] +import b +[file b.py] +x: A = int() +A = int +[file b.py.2] +x: A = int() +A = str +[out] +== +b.py:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineForwardFunc] +import b +[file b.py] +def f(x: A): + x = int() +A = int +[file b.py.2] +def f(x: A): + x = int() +A = str +[out] +== +b.py:2: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineChainedFunc] +import b +[file a.py] +A = int +[file a.py.2] +A = str +[file aa.py] +import a +B = a.A +[file b.py] +import aa +def f(x: aa.B): + x = int() +[out] +== +b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineChainedClass] +import b +[file a.py] +A = int +[file a.py.2] +A = str +[file aa.py] +import a +B = a.A +[file b.py] +import aa +class C: + x: aa.B +c = C() +c.x = int() +[out] +== +b.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineNestedMod] +import b +[file a.py] +from typing import Dict +A = Dict[str, int] +[file a.py.2] +from typing import Dict +A = Dict[str, str] +[file aa.py] +from typing import Dict +import a +B = Dict[str, a.A] +[file b.py] +import aa +x: aa.B = {'first': {str(): int()}} +[builtins fixtures/dict.pyi] +[out] +== +b.py:2: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" + +[case testAliasFineNestedFunc] +import b +[file a.py] +from typing import Dict +A = Dict[str, int] +[file a.py.2] +from typing import Dict +A = Dict[str, str] +[file aa.py] +from typing import Dict +import a +B = Dict[str, a.A] +[file b.py] +import aa +def f(x: aa.B): + x = {'first': {str(): int()}} +[builtins fixtures/dict.pyi] +[out] +== +b.py:3: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" + +[case testAliasFineNestedFuncDirect] +import b +[file a.py] +from typing import Dict +A = Dict[str, int] +[file a.py.2] +from typing import Dict +A = Dict[str, str] +[file aa.py] +from typing import Dict +import a +E = Dict +[file b.py] +import aa +def f(x: aa.E[str, aa.a.A]): + x = {'first': {str(): int()}} +[builtins fixtures/dict.pyi] +[out] +== +b.py:3: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" + +[case testAliasFineNonGenericToGeneric] +import b +[file a.py] +from typing import Dict, TypeVar +T = TypeVar('T') +A = Dict[T, int] +[file a.py.2] +A = str +[file b.py] +import a +def f(x: a.A[str]): + pass +[builtins fixtures/dict.pyi] +[out] +== +b.py:2: error: "str" expects no type arguments, but 1 given + +[case testAliasFineGenericToNonGeneric] +import b +[file a.py] +A = str +[file a.py.2] 
+from typing import Dict, TypeVar +T = TypeVar('T') +A = Dict[T, int] +[file b.py] +import a +def f(x: a.A): + pass +reveal_type(f) +[builtins fixtures/dict.pyi] +[out] +b.py:4: error: Revealed type is 'def (x: builtins.str) -> Any' +== +b.py:4: error: Revealed type is 'def (x: builtins.dict[Any, builtins.int]) -> Any' + +[case testAliasFineChangedNumberOfTypeVars] +import b +[file a.py] +from typing import Dict, TypeVar +T = TypeVar('T') +A = Dict[T, int] +[file a.py.2] +from typing import Dict, TypeVar +T = TypeVar('T') +S = TypeVar('S') +A = Dict[T, S] +[file b.py] +import a +def f(x: a.A[str]): + pass +[builtins fixtures/dict.pyi] +[out] +== +b.py:2: error: Bad number of arguments for type alias, expected: 2, given: 1 + +[case testAliasFineAdded] +import b +[file a.py] +[file a.py.2] +A = int +[file b.py] +import a +x: a.A +[out] +b.py:2: error: Name 'a.A' is not defined +== + +[case testAliasFineDeleted] +import b +[file a.py] +A = int +[file a.py.2] +[file b.py] +import a +x: a.A +[out] +== +b.py:2: error: Name 'a.A' is not defined + +[case testAliasFineClassToAlias] +import b +[file a.py] +class A: pass +[file a.py.2] +A = int +[file b.py] +import a +x: a.A +x = 1 +[out] +b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "A") +== + +[case testAliasFineAliasToClass] +import b +[file a.py] +A = int +[file a.py.2] +class A: pass +[file b.py] +import a +x: a.A +x = 1 +[out] +== +b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "A") + +[case testAliasFineComponentDeleted] +import b +[file a.py] +class B: pass +[file a.py.2] +x = 1 +[file b.py] +import a +from typing import Dict, TypeVar +T = TypeVar('T') +A = Dict[T, a.B] +def f(x: A[int]): + pass +[builtins fixtures/dict.pyi] +[out] +== +b.py:4: error: Name 'a.B' is not defined + +[case testAliasFineTargetDeleted] +import c +[file a.py] +A = int +[file b.py] +import a +B = a.A +[file b.py.2] +x = 1 +[file c.py] +import b +def f(x: b.B): + pass +[out] +== +c.py:2: error: Name 'b.B' is not defined + +[case testAliasFineClassInFunction] +import b +[file a.py] +A = int +[file a.py.2] +A = str +[file b.py] +import a +def f() -> None: + class C: + x: a.A = int() +[out] +== +b.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testAliasFineInitNormalMod] +import c +[file a.py] +class A: + def __init__(self, x: int) -> None: + pass +[file a.py.2] +class A: + def __init__(self, x: str) -> None: + pass +[file b.py] +import a +B = a.A +[file c.py] +from b import B +B(int()) +[out] +== +c.py:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" + +[case testAliasFineInitNormalFunc] +import c +[file a.py] +class A: + def __init__(self, x: int) -> None: + pass +[file a.py.2] +class A: + def __init__(self, x: str) -> None: + pass +[file b.py] +import a +B = a.A +[file c.py] +from b import B +def f() -> None: + B(int()) +[out] +== +c.py:3: error: Argument 1 to "A" has incompatible type "int"; expected "str" + +[case testAliasFineInitGenericMod] +import c +[file a.py] +from typing import Generic, TypeVar +T = TypeVar('T') +S = TypeVar('S') +class A(Generic[T, S]): + def __init__(self, x: T) -> None: + pass +[file a.py.2] +from typing import Generic, TypeVar +T = TypeVar('T') +S = TypeVar('S') +class A(Generic[T, S]): + def __init__(self, x: S) -> None: + pass +[file b.py] +import a +B = a.A[int, str] +[file c.py] +from b import B +B(int()) +[out] +== +c.py:2: error: Argument 1 has 
incompatible type "int"; expected "str" + +[case testAliasFineInitGenericFunc] +import c +[file a.py] +from typing import Generic, TypeVar +T = TypeVar('T') +S = TypeVar('S') +class A(Generic[T, S]): + def __init__(self, x: T) -> None: + pass +[file a.py.2] +from typing import Generic, TypeVar +T = TypeVar('T') +S = TypeVar('S') +class A(Generic[T, S]): + def __init__(self, x: S) -> None: + pass +[file b.py] +import a +B = a.A[int, str] +[file c.py] +from b import B +def f() -> None: + B(str()) +[out] +c.py:3: error: Argument 1 has incompatible type "str"; expected "int" +== + +[case testAliasFineInitChainedMod] +import d +[file a.py] +class A: + def __init__(self, x: int) -> None: + pass +[file a.py.2] +class A: + def __init__(self, x: str) -> None: + pass +[file b.py] +import a +B = a.A +[file c.py] +import b +C = b.B +[file d.py] +from c import C +C(int()) +[out] +== +d.py:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" + +[case testAliasFineInitChainedFunc] +import d +[file a.py] +class A: + def __init__(self, x: int) -> None: + pass +[file a.py.2] +class A: + def __init__(self, x: str) -> None: + pass +[file b.py] +import a +B = a.A +[file c.py] +import b +C = b.B +[file d.py] +from c import C +def f() -> None: + C(str()) +[out] +d.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int" +== + [case testNonePartialType] import a a.y