Foundation for fine-grained incremental checking #2838

Merged: 45 commits on Apr 5, 2017

Commits (45)
d126ce4
Add foundation for fine-grained incremental type checking
JukkaL Jan 20, 2017
c690fec
Attempt to fix tests on Windows
JukkaL Feb 9, 2017
38d69aa
Try to work around travis failure
JukkaL Feb 10, 2017
3b98a1d
Add missing import
JukkaL Mar 2, 2017
4f3b192
Another attempt to fix the travis build
JukkaL Mar 2, 2017
da6631c
Fix typos
JukkaL Mar 3, 2017
2799278
Support multiple rounds of event propagation
JukkaL Feb 9, 2017
28d8279
Keep track of targets that generated an error
JukkaL Feb 11, 2017
f38024b
Refactor and continue reporting error if no changes
JukkaL Mar 3, 2017
01ed71c
Fix bugs
JukkaL Mar 3, 2017
709740a
Create dependencies for inheritance
JukkaL Mar 7, 2017
3a67189
Detect differences in MRO
JukkaL Mar 7, 2017
42a77b6
Merge base classes and MRO
JukkaL Mar 7, 2017
093d17d
Add support for minimal debug output
JukkaL Mar 7, 2017
5a6eb05
Add test cases
JukkaL Mar 7, 2017
2763794
Fix to attributes, inheritance and fine-grained incremental
JukkaL Mar 7, 2017
d9cfc3f
Fix handling changes to attributes in base classes
JukkaL Mar 8, 2017
07bc54b
Fixes to dependency generation
JukkaL Mar 8, 2017
d95864b
Support classes as fine-grained incremental targets
JukkaL Mar 8, 2017
baeb421
Fix inheritance test case
JukkaL Mar 8, 2017
41c81aa
Add minimal package support
JukkaL Mar 8, 2017
82afe22
Add tests for __init__ modules
JukkaL Mar 8, 2017
e5199da
Add test cases for module attributes
JukkaL Mar 8, 2017
84b7a62
Fix test case
JukkaL Mar 9, 2017
67a54d7
Implement multiple propagation steps for module attributes
JukkaL Mar 9, 2017
54c800d
Support constructors for fine-grained incremental
JukkaL Mar 9, 2017
770cc61
Support from m import with fine-grained incremental
JukkaL Mar 9, 2017
2f0c0a7
Support nested classes with fine-grained incremental
JukkaL Mar 10, 2017
b480807
Fix merge test case
JukkaL Mar 10, 2017
0636545
Remove debug print
JukkaL Mar 10, 2017
e271844
More nested class test cases
JukkaL Mar 10, 2017
291286f
Fixes to classes with fine-grained incremental
JukkaL Mar 10, 2017
25a2846
Minor fixes
JukkaL Mar 21, 2017
2b9104c
Add review feedback
JukkaL Mar 28, 2017
ef97a75
Address more feedback and fix a bug
JukkaL Mar 29, 2017
23ca75b
Add additional debug output
JukkaL Mar 30, 2017
027f79b
Fix issues caused by rebase
JukkaL Apr 3, 2017
dbd5d67
Remove travis CI workaround
JukkaL Apr 3, 2017
a47ecb8
Fix another issue caused by the rebase
JukkaL Apr 3, 2017
ba17fe4
Fix flaky test case
JukkaL Apr 3, 2017
d3bf923
Fix flaky test by making processing order deterministic
JukkaL Apr 3, 2017
5c03d7e
Attempt to fix tests on Windows
JukkaL Apr 4, 2017
89adec4
Fix deferred lambdas
JukkaL Apr 5, 2017
287f831
Fix self check failure
JukkaL Apr 5, 2017
43d16e4
Merge branch 'master' into fine-grained
JukkaL Apr 5, 2017
mypy/build.py: 24 changes (13 additions, 11 deletions)
@@ -65,8 +65,9 @@ class BuildResult:
errors: List of error messages.
"""

def __init__(self, manager: 'BuildManager') -> None:
def __init__(self, manager: 'BuildManager', graph: Graph) -> None:
self.manager = manager
self.graph = graph
self.files = manager.modules
self.types = manager.all_types
self.errors = manager.errors.messages()
@@ -184,8 +185,8 @@ def build(sources: List[BuildSource],
)

try:
dispatch(sources, manager)
return BuildResult(manager)
graph = dispatch(sources, manager)
return BuildResult(manager, graph)
finally:
manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" %
(time.time() - manager.start_time,
@@ -474,7 +475,7 @@ def parse_file(self, id: str, path: str, source: str, ignore_errors: bool) -> My
return tree

def module_not_found(self, path: str, line: int, id: str) -> None:
self.errors.set_file(path)
self.errors.set_file(path, id)
stub_msg = "(Stub files are from https://github.com/python/typeshed)"
if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
(self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
@@ -1230,7 +1231,7 @@ def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
# so we'd need to cache the decision.
manager = self.manager
manager.errors.set_import_context([])
manager.errors.set_file(ancestor_for.xpath)
manager.errors.set_file(ancestor_for.xpath, ancestor_for.id)
manager.errors.report(-1, -1, "Ancestor package '%s' ignored" % (id,),
severity='note', only_once=True)
manager.errors.report(-1, -1,
@@ -1242,7 +1243,7 @@ def skipping_module(self, id: str, path: str) -> None:
manager = self.manager
save_import_context = manager.errors.import_context()
manager.errors.set_import_context(self.caller_state.import_context)
manager.errors.set_file(self.caller_state.xpath)
manager.errors.set_file(self.caller_state.xpath, self.caller_state.id)
line = self.caller_line
manager.errors.report(line, 0,
"Import of '%s' ignored" % (id,),
@@ -1429,7 +1430,7 @@ def parse_file(self) -> None:
continue
if id == '':
# Must be from a relative import.
manager.errors.set_file(self.xpath)
manager.errors.set_file(self.xpath, self.id)
manager.errors.report(line, 0,
"No parent module -- cannot perform relative import",
blocker=True)
@@ -1545,20 +1546,21 @@ def write_cache(self) -> None:
self.interface_hash = new_interface_hash


def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
manager.log("Mypy version %s" % __version__)
graph = load_graph(sources, manager)
if not graph:
print("Nothing to do?!")
return
return graph
manager.log("Loaded graph with %d nodes" % len(graph))
if manager.options.dump_graph:
dump_graph(graph)
return
return graph
process_graph(graph, manager)
if manager.options.warn_unused_ignores:
# TODO: This could also be a per-module option.
manager.errors.generate_unused_ignore_notes()
return graph


class NodeInfo:
@@ -1633,7 +1635,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
except ModuleNotFound:
continue
if st.id in graph:
manager.errors.set_file(st.xpath)
manager.errors.set_file(st.xpath, st.id)
manager.errors.report(-1, -1, "Duplicate module named '%s'" % st.id)
manager.errors.raise_error()
graph[st.id] = st
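Aside (not part of the diff): a minimal sketch of how a caller of mypy's build API might use the graph that dispatch() now returns through BuildResult.graph. The BuildSource/Options usage and the State attributes touched here are assumptions drawn from the surrounding code, not something this PR guarantees.

    from mypy import build
    from mypy.build import BuildSource
    from mypy.options import Options

    # Hypothetical driver: type check an in-memory module and walk the
    # module graph that build() now exposes alongside the error list.
    options = Options()
    sources = [BuildSource(None, 'demo', 'x = 1\n')]
    result = build.build(sources, options=options)

    # result.graph maps module ids to State objects (the Graph produced by
    # load_graph/dispatch); state.tree is the parsed MypyFile when available.
    for module_id, state in sorted(result.graph.items()):
        print(module_id, state.xpath, state.tree is not None)
    print(result.errors)

Presumably this is the hook the fine-grained incremental machinery needs: keeping the graph around lets later passes reuse per-module State instead of rebuilding it.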
mypy/checker.py: 89 changes (59 additions, 30 deletions)
@@ -65,13 +65,16 @@
LAST_PASS = 1 # Pass numbers start at 0


# A node which is postponed to be type checked during the next pass.
# A node which is postponed to be processed during the next pass.
# This is used for both batch mode and fine-grained incremental mode.
DeferredNode = NamedTuple(
'DeferredNode',
[
('node', FuncItem),
# In batch mode only FuncDef and LambdaExpr are supported
('node', Union[FuncDef, LambdaExpr, MypyFile]),
('context_type_name', Optional[str]), # Name of the surrounding class (for error messages)
('active_class', Optional[Type]), # And its type (for selftype handling)
('active_typeinfo', Optional[TypeInfo]), # And its TypeInfo (for semantic analysis
# self type handling)
])


@@ -167,7 +170,7 @@ def check_first_pass(self) -> None:

Deferred functions will be processed by check_second_pass().
"""
self.errors.set_file(self.path)
self.errors.set_file(self.path, self.tree.fullname())
with self.enter_partial_types():
with self.binder.top_frame_context():
for d in self.tree.defs:
@@ -187,38 +190,57 @@ def check_first_pass(self) -> None:
self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
all_.node)

def check_second_pass(self) -> bool:
def check_second_pass(self, todo: List[DeferredNode] = None) -> bool:
"""Run second or following pass of type checking.

This goes through deferred nodes, returning True if there were any.
"""
if not self.deferred_nodes:
if not todo and not self.deferred_nodes:
return False
self.errors.set_file(self.path)
self.errors.set_file(self.path, self.tree.fullname())
self.pass_num += 1
todo = self.deferred_nodes
if not todo:
todo = self.deferred_nodes
[Review comment, Member]: Hm, what if todo and self.deferred_nodes are both non-empty?

else:
assert not self.deferred_nodes
self.deferred_nodes = []
done = set() # type: Set[FuncItem]
for node, type_name, active_class in todo:
done = set() # type: Set[Union[FuncDef, LambdaExpr, MypyFile]]
for node, type_name, active_typeinfo in todo:
if node in done:
continue
# This is useful for debugging:
# print("XXX in pass %d, class %s, function %s" %
# (self.pass_num, type_name, node.fullname() or node.name()))
done.add(node)
with self.errors.enter_type(type_name) if type_name else nothing():
with self.scope.push_class(active_class) if active_class else nothing():
if isinstance(node, Statement):
self.accept(node)
elif isinstance(node, Expression):
self.expr_checker.accept(node)
else:
assert False
with self.scope.push_class(active_typeinfo) if active_typeinfo else nothing():
self.check_partial(node)
return True

def check_partial(self, node: Union[FuncDef, LambdaExpr, MypyFile]) -> None:
if isinstance(node, MypyFile):
self.check_top_level(node)
elif isinstance(node, LambdaExpr):
self.expr_checker.accept(node)
else:
self.accept(node)

def check_top_level(self, node: MypyFile) -> None:
"""Check only the top-level of a module, skipping function definitions."""
with self.enter_partial_types():
with self.binder.top_frame_context():
for d in node.defs:
# TODO: Type check class bodies.
if not isinstance(d, (FuncDef, ClassDef)):
d.accept(self)

assert not self.current_node_deferred
# TODO: Handle __all__

def handle_cannot_determine_type(self, name: str, context: Context) -> None:
node = self.scope.top_function()
if self.pass_num < LAST_PASS and node is not None:
if (self.pass_num < LAST_PASS and node is not None
and isinstance(node, (FuncDef, LambdaExpr))):
# Don't report an error yet. Just defer.
if self.errors.type_name:
type_name = self.errors.type_name[-1]
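Aside (not part of the diff): the deferral mechanism above collects nodes whose types could not be determined yet and re-checks them in a later pass; the new todo parameter lets a fine-grained driver hand check_second_pass an explicit list of targets instead. A self-contained sketch of that pattern, using simplified stand-ins rather than mypy's real types:

    from typing import List, NamedTuple, Optional, Set

    # Toy analogue of DeferredNode: a target name plus an optional class name.
    Deferred = NamedTuple('Deferred', [('node', str), ('class_name', Optional[str])])

    class MiniChecker:
        def __init__(self) -> None:
            self.deferred = []  # type: List[Deferred]

        def first_pass(self, nodes: List[str]) -> None:
            # Pretend targets ending in '?' could not be resolved yet; defer them.
            for n in nodes:
                if n.endswith('?'):
                    self.deferred.append(Deferred(n, None))

        def second_pass(self, todo: List[Deferred] = None) -> bool:
            # Like check_second_pass: use the explicit todo list (fine-grained mode)
            # or fall back to whatever the first pass deferred (batch mode).
            if not todo and not self.deferred:
                return False
            if not todo:
                todo = self.deferred
            self.deferred = []
            done = set()  # type: Set[str]
            for item in todo:
                if item.node in done:
                    continue
                done.add(item.node)
                print('re-checking', item.node)
            return True

    checker = MiniChecker()
    checker.first_pass(['f', 'g?'])
    assert checker.second_pass()                        # batch mode: re-checks 'g?'
    assert checker.second_pass([Deferred('f', None)])   # fine-grained: explicit target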
@@ -635,7 +657,7 @@ def is_implicit_any(t: Type) -> bool:
for i in range(len(typ.arg_types)):
arg_type = typ.arg_types[i]

ref_type = self.scope.active_class()
ref_type = self.scope.active_self_type() # type: Optional[Type]
if (isinstance(defn, FuncDef) and ref_type is not None and i == 0
and not defn.is_static
and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]):
@@ -946,7 +968,7 @@ def check_method_override_for_base_with_name(
# The name of the method is defined in the base class.

# Construct the type of the overriding method.
typ = bind_self(self.function_type(defn), self.scope.active_class())
typ = bind_self(self.function_type(defn), self.scope.active_self_type())
# Map the overridden method type to subtype context so that
# it can be checked for compatibility.
original_type = base_attr.type
@@ -959,7 +981,7 @@
assert False, str(base_attr.node)
if isinstance(original_type, FunctionLike):
original = map_type_from_supertype(
bind_self(original_type, self.scope.active_class()),
bind_self(original_type, self.scope.active_self_type()),
defn.info, base)
# Check that the types are compatible.
# TODO overloaded signatures
@@ -1051,7 +1073,7 @@ def visit_class_def(self, defn: ClassDef) -> None:
old_binder = self.binder
self.binder = ConditionalTypeBinder()
with self.binder.top_frame_context():
with self.scope.push_class(fill_typevars(defn.info)):
with self.scope.push_class(defn.info):
self.accept(defn.defs)
self.binder = old_binder
if not defn.has_incompatible_baseclass:
@@ -1317,8 +1339,8 @@ def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Type, rvalue:
# Class-level function objects and classmethods become bound
# methods: the former to the instance, the latter to the
# class
base_type = bind_self(base_type, self.scope.active_class())
compare_type = bind_self(compare_type, self.scope.active_class())
base_type = bind_self(base_type, self.scope.active_self_type())
compare_type = bind_self(compare_type, self.scope.active_self_type())

# If we are a static method, ensure to also tell the
# lvalue it now contains a static method
@@ -1347,7 +1369,8 @@ def lvalue_type_from_base(self, expr_node: Var,

if base_type:
if not has_no_typevars(base_type):
instance = cast(Instance, self.scope.active_class())
# TODO: Handle TupleType, don't cast
instance = cast(Instance, self.scope.active_self_type())
itype = map_instance_to_supertype(instance, base)
base_type = expand_type_by_instance(base_type, itype)

@@ -2996,7 +3019,7 @@ def is_node_static(node: Node) -> Optional[bool]:

class Scope:
# We keep two stacks combined, to maintain the relative order
stack = None # type: List[Union[Type, FuncItem, MypyFile]]
stack = None # type: List[Union[TypeInfo, FuncItem, MypyFile]]

def __init__(self, module: MypyFile) -> None:
self.stack = [module]
@@ -3007,20 +3030,26 @@ def top_function(self) -> Optional[FuncItem]:
return e
return None

def active_class(self) -> Optional[Type]:
if isinstance(self.stack[-1], Type):
def active_class(self) -> Optional[TypeInfo]:
if isinstance(self.stack[-1], TypeInfo):
return self.stack[-1]
return None

def active_self_type(self) -> Optional[Union[Instance, TupleType]]:
info = self.active_class()
if info:
return fill_typevars(info)
return None

@contextmanager
def push_function(self, item: FuncItem) -> Iterator[None]:
self.stack.append(item)
yield
self.stack.pop()

@contextmanager
def push_class(self, t: Type) -> Iterator[None]:
self.stack.append(t)
def push_class(self, info: TypeInfo) -> Iterator[None]:
self.stack.append(info)
yield
self.stack.pop()
