From 7e22778326c3ec4c07011bec567db7cc68fc0d6d Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 21 Feb 2018 17:33:02 -0800 Subject: [PATCH 01/10] Less "Skipping" debug spew --- mypy/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mypy/build.py b/mypy/build.py index ac8436ee10af..237361a69b83 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -1623,7 +1623,8 @@ def __init__(self, self.ignore_all = True else: # In 'error' mode, produce special error messages. - manager.log("Skipping %s (%s)" % (path, id)) + if id not in manager.missing_modules: + manager.log("Skipping %s (%s)" % (path, id)) if follow_imports == 'error': if ancestor_for: self.skipping_ancestor(id, path, ancestor_for) From c21e21653e4a6da008795cb056851b767f1af9ce Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 21 Feb 2018 18:21:01 -0800 Subject: [PATCH 02/10] Remove the all_are_submodules handling --- mypy/build.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 237361a69b83..09af1f9fd086 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -658,23 +658,15 @@ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str: elif isinstance(imp, ImportFrom): cur_id = correct_rel_imp(imp) pos = len(res) - all_are_submodules = True # Also add any imported names that are submodules. pri = import_priority(imp, PRI_MED) for name, __ in imp.names: sub_id = cur_id + '.' + name if self.is_module(sub_id): res.append((pri, sub_id, imp.line)) - else: - all_are_submodules = False - # If all imported names are submodules, don't add - # cur_id as a dependency. Otherwise (i.e., if at - # least one imported name isn't a submodule) - # cur_id is also a dependency, and we should - # insert it *before* any submodules. - if not all_are_submodules: - pri = import_priority(imp, PRI_HIGH) - res.insert(pos, ((pri, cur_id, imp.line))) + + pri = import_priority(imp, PRI_HIGH) + res.insert(pos, ((pri, cur_id, imp.line))) elif isinstance(imp, ImportAll): pri = import_priority(imp, PRI_HIGH) res.append((pri, correct_rel_imp(imp), imp.line)) From d8442277f430bc2162f71507dc94c87dea47a671 Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 21 Feb 2018 17:31:04 -0800 Subject: [PATCH 03/10] Eliminate serializing into and out of SavedCache --- mypy/build.py | 7 +- mypy/checker.py | 10 ++ mypy/dmypy_server.py | 7 +- mypy/server/update.py | 112 +++++------------------ mypy/test/testcheck.py | 1 + test-data/unit/fine-grained-cycles.test | 39 +------- test-data/unit/fine-grained-modules.test | 9 +- test-data/unit/fine-grained.test | 4 +- 8 files changed, 49 insertions(+), 140 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 09af1f9fd086..5c6ed710ceb9 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2186,13 +2186,16 @@ def dump_graph(graph: Graph) -> None: print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]") -def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph: +def load_graph(sources: List[BuildSource], manager: BuildManager, + old_graph: Optional[Graph] = None) -> Graph: """Given some source files, load the full dependency graph. As this may need to parse files, this can raise CompileError in case there are syntax errors. """ - graph = {} # type: Graph + + graph = old_graph or {} # type: Graph + # The deque is used to implement breadth-first traversal. # TODO: Consider whether to go depth-first instead. This may # affect the order in which we process files within import cycles. 
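The `old_graph` parameter threaded into `load_graph()` above is the linchpin of this patch: it lets fine-grained updates extend the existing graph in place instead of rebuilding it, and instead of round-tripping module state through `SavedCache`. The contract is simply that whatever dict the caller passes in is mutated and returned. Below is a minimal sketch of that reuse pattern, with a toy `State` and a `parse_and_add` stand-in for the real breadth-first loading loop; none of this is mypy's actual code.

from typing import Dict, List, Optional

class State:
    """Toy stand-in for mypy.build.State."""
    def __init__(self, module_id: str) -> None:
        self.id = module_id

Graph = Dict[str, State]

def parse_and_add(graph: Graph, module_id: str) -> None:
    # Stand-in for the real worklist: only load modules we don't
    # already have a State (and hence a parsed AST) for.
    if module_id not in graph:
        graph[module_id] = State(module_id)

def load_graph(sources: List[str],
               old_graph: Optional[Graph] = None) -> Graph:
    # Reuse the caller's graph when one is supplied; mutations are
    # visible to the caller because the same dict object is extended.
    # Note `or {}` also replaces an *empty* old graph with a fresh
    # dict, which is harmless here since there is nothing to keep.
    graph = old_graph or {}
    for module_id in sources:
        parse_and_add(graph, module_id)
    return graph

graph = load_graph(['a', 'b'])        # initial build
load_graph(['c'], old_graph=graph)    # later update extends in place
assert sorted(graph) == ['a', 'b', 'c']

Because previously loaded `State` objects survive across calls, their ASTs and type maps no longer need to be serialized into `saved_cache` and deserialized back out, which is what the rest of this patch removes.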
diff --git a/mypy/checker.py b/mypy/checker.py index 922238739475..77b8e6240b64 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -216,6 +216,16 @@ def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Option # for processing module top levels in fine-grained incremental mode. self.recurse_into_functions = True + def reset(self) -> None: + """Cleanup stale state that might be left over from a typechecking run. + + This allows us to reuse TypeChecker objects in fine-grained + incremental mode. + """ + self.partial_reported.clear() + assert self.partial_types == [] + assert self.deferred_nodes == [] + def check_first_pass(self) -> None: """Type check the entire file, but defer functions with unresolved references. diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index cf4b15e3e6aa..c423053d2e9e 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -282,10 +282,11 @@ def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict if self.options.use_fine_grained_cache: # Pull times and hashes out of the saved_cache and stick them into # the fswatcher, so we pick up the changes. - for meta, mypyfile, type_map in manager.saved_cache.values(): - if meta.mtime is None: continue + for state in self.fine_grained_manager.graph.values(): + meta = state.meta + if meta is None: continue self.fswatcher.set_file_data( - mypyfile.path, + state.xpath, FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash)) # Run an update diff --git a/mypy/server/update.py b/mypy/server/update.py index d8408c6c09a9..e09174b91059 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -161,6 +161,7 @@ def __init__(self, self.previous_modules = get_module_to_path_map(manager) self.deps = get_all_dependencies(manager, graph, self.options) self.previous_targets_with_errors = manager.errors.targets() + self.graph = graph # Module, if any, that had blocking errors in the last run as (id, path) tuple. # TODO: Handle blocking errors in the initial build self.blocking_error = None # type: Optional[Tuple[str, str]] @@ -170,8 +171,7 @@ def __init__(self, # for the cache. This is kind of a hack and it might be better to have # this directly reflected in load_graph's interface. 
self.options.cache_dir = os.devnull - mark_all_meta_as_memory_only(graph, manager) - manager.saved_cache = preserve_full_cache(graph, manager) + manager.saved_cache = {} self.type_maps = extract_type_maps(graph) # Active triggers during the last update self.triggered = [] # type: List[str] @@ -261,7 +261,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str], old_snapshots[module] = snapshot manager.errors.reset() - result = update_single_isolated(module, path, manager, previous_modules) + result = update_single_isolated(module, path, manager, previous_modules, self.graph) if isinstance(result, BlockedUpdate): # Blocking error -- just give up module, path, remaining, errors = result @@ -294,23 +294,13 @@ def update_single(self, module: str, path: str) -> Tuple[List[str], if state.tree is None and id in manager.saved_cache: meta, tree, type_map = manager.saved_cache[id] state.tree = tree - mark_all_meta_as_memory_only(graph, manager) - manager.saved_cache = preserve_full_cache(graph, manager) self.previous_modules = get_module_to_path_map(manager) self.type_maps = extract_type_maps(graph) - return manager.errors.new_messages(), remaining, (module, path), False - + # XXX: I want us to not need this + self.graph = graph -def mark_all_meta_as_memory_only(graph: Dict[str, State], - manager: BuildManager) -> None: - for id, state in graph.items(): - if id in manager.saved_cache: - # Don't look at disk. - old = manager.saved_cache[id] - manager.saved_cache[id] = (old[0]._replace(memory_only=True), - old[1], - old[2]) + return manager.errors.new_messages(), remaining, (module, path), False def get_all_dependencies(manager: BuildManager, graph: Dict[str, State], @@ -350,7 +340,8 @@ def get_all_dependencies(manager: BuildManager, graph: Dict[str, State], def update_single_isolated(module: str, path: str, manager: BuildManager, - previous_modules: Dict[str, str]) -> UpdateResult: + previous_modules: Dict[str, str], + graph: Graph) -> UpdateResult: """Build a new version of one changed module only. Don't propagate changes to elsewhere in the program. Raise CompleError on @@ -371,11 +362,11 @@ def update_single_isolated(module: str, old_modules = dict(manager.modules) sources = get_sources(previous_modules, [(module, path)]) - invalidate_stale_cache_entries(manager.saved_cache, [(module, path)]) + invalidate_stale_cache_entries(manager.saved_cache, graph, [(module, path)]) manager.missing_modules.clear() try: - graph = load_graph(sources, manager) + load_graph(sources, manager, graph) except CompileError as err: # Parse error somewhere in the program -- a blocker assert err.module_with_blocker @@ -437,6 +428,7 @@ def update_single_isolated(module: str, replace_modules_with_new_variants(manager, graph, old_modules, new_modules) # Perform type checking. 
+ state.type_checker().reset() state.type_check_first_pass() state.type_check_second_pass() state.compute_fine_grained_deps() @@ -488,8 +480,9 @@ def delete_module(module_id: str, manager.log_fine_grained('delete module %r' % module_id) # TODO: Deletion of a package # TODO: Remove deps for the module (this only affects memory use, not correctness) - assert module_id not in graph new_graph = graph.copy() + if module_id in new_graph: + del new_graph[module_id] if module_id in manager.modules: del manager.modules[module_id] if module_id in manager.saved_cache: @@ -529,9 +522,11 @@ def get_sources(modules: Dict[str, str], sources = [BuildSource(path, id, None) for id, path in items if os.path.isfile(path)] + sources = [] for id, path in changed_modules: - if os.path.isfile(path) and id not in modules: + if os.path.isfile(path):# and id not in modules: sources.append(BuildSource(path, id, None)) + # print(changed_modules, sources) return sources @@ -549,75 +544,14 @@ def get_all_changed_modules(root_module: str, return changed_modules -def preserve_full_cache(graph: Graph, manager: BuildManager) -> SavedCache: - """Preserve every module with an AST in the graph, including modules with errors.""" - saved_cache = {} - for id, state in graph.items(): - assert state.id == id - if state.tree is not None: - meta = state.meta - if meta is None: - # No metadata, likely because of an error. We still want to retain the AST. - # There is no corresponding JSON so create partial "memory-only" metadata. - assert state.path - dep_prios = state.dependency_priorities() - dep_lines = state.dependency_lines() - meta = memory_only_cache_meta( - id, - state.path, - state.dependencies, - state.suppressed, - list(state.child_modules), - dep_prios, - dep_lines, - state.source_hash, - state.ignore_all, - manager) - else: - meta = meta._replace(memory_only=True) - saved_cache[id] = (meta, state.tree, state.type_map()) - return saved_cache - - -def memory_only_cache_meta(id: str, - path: str, - dependencies: List[str], - suppressed: List[str], - child_modules: List[str], - dep_prios: List[int], - dep_lines: List[int], - source_hash: str, - ignore_all: bool, - manager: BuildManager) -> CacheMeta: - """Create cache metadata for module that doesn't have a JSON cache files. - - JSON cache files aren't written for modules with errors, but we want to still - cache them in fine-grained incremental mode. - """ - options = manager.options.clone_for_module(id) - # Note that we omit attributes related to the JSON files. - meta = {'id': id, - 'path': path, - 'memory_only': True, # Important bit: don't expect JSON files to exist - 'hash': source_hash, - 'dependencies': dependencies, - 'suppressed': suppressed, - 'child_modules': child_modules, - 'options': options.select_options_affecting_cache(), - 'dep_prios': dep_prios, - 'dep_lines': dep_lines, - 'interface_hash': '', - 'version_id': manager.version_id, - 'ignore_all': ignore_all, - } - return cache_meta_from_dict(meta, '') - - def invalidate_stale_cache_entries(cache: SavedCache, + graph: Graph, changed_modules: List[Tuple[str, str]]) -> None: for name, _ in changed_modules: if name in cache: del cache[name] + if name in graph: + del graph[name] def verify_dependencies(state: State, manager: BuildManager) -> None: @@ -809,8 +743,8 @@ def reprocess_nodes(manager: BuildManager, Return fired triggers. 
""" - if module_id not in manager.saved_cache or module_id not in graph: - manager.log_fine_grained('%s not in saved cache or graph (blocking errors or deleted?)' % + if module_id not in graph: + manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' % module_id) return set() @@ -863,10 +797,8 @@ def key(node: DeferredNode) -> int: merge_asts(file_node, old_symbols[name], file_node, new_symbols[name]) # Type check. - meta, file_node, type_map = manager.saved_cache[module_id] - graph[module_id].tree = file_node - graph[module_id].type_checker().type_map = type_map checker = graph[module_id].type_checker() + checker.reset() # We seem to need additional passes in fine-grained incremental mode. checker.pass_num = 0 checker.last_pass = 3 diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 0b1f1573760e..4cca36ef3503 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -149,6 +149,7 @@ def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0) options = parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True options.show_traceback = True + options.verbosity = 1 if 'optional' in testcase.file: options.strict_optional = True if incremental_step: diff --git a/test-data/unit/fine-grained-cycles.test b/test-data/unit/fine-grained-cycles.test index 737d0ed0d4eb..2688e593482e 100644 --- a/test-data/unit/fine-grained-cycles.test +++ b/test-data/unit/fine-grained-cycles.test @@ -174,44 +174,7 @@ def h() -> None: == a.py:1: error: Module 'b' has no attribute 'C' -[case testReferenceToTypeThroughCycleAndReplaceWithFunction-skip-cache] --- Different cache/no-cache tests because: --- Cache mode has a "need type annotation" message (like coarse incremental does) - -import a - -[file a.py] -from b import C - -def f() -> C: pass - -[file b.py] -import a - -class C: - def g(self) -> None: pass - -def h() -> None: - c = a.f() - c.g() - -[file b.py.2] -import a - -def C() -> int: pass - -def h() -> None: - c = a.f() - c.g() - -[out] -== -a.py:3: error: Invalid type "b.C" - -[case testReferenceToTypeThroughCycleAndReplaceWithFunction-skip-nocache] --- Different cache/no-cache tests because: --- Cache mode has a "need type annotation" message (like coarse incremental does) - +[case testReferenceToTypeThroughCycleAndReplaceWithFunction] import a [file a.py] diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index 2c60ff30615f..44a48451b0ae 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -236,8 +236,7 @@ main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" [case testDeletionOfSubmoduleTriggersImportFrom1-skip-cache] -- Different cache/no-cache tests because: --- Cache mode matches the message from regular mode and no-cache mode --- matches the message from coarse incremental mode... 
+-- missing module error message mismatch from p import q [file p/__init__.py] [file p/q.py] @@ -245,15 +244,15 @@ from p import q [file p/q.py.3] [out] == -main:1: error: Cannot find module named 'p.q' +main:1: error: Module 'p' has no attribute 'q' -- TODO: The following messages are different compared to non-incremental mode +main:1: error: Cannot find module named 'p.q' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == [case testDeletionOfSubmoduleTriggersImportFrom1-skip-nocache] -- Different cache/no-cache tests because: --- Cache mode matches the message from regular mode and no-cache mode --- matches the message from coarse incremental mode... +-- missing module error message mismatch from p import q [file p/__init__.py] [file p/q.py] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 17b022744829..10abcdb72084 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -1108,9 +1108,9 @@ def f() -> Iterator[None]: [out] main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]' == +main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]' a.py:3: error: Cannot find module named 'b' a.py:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) -main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]' == main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]' @@ -2355,7 +2355,7 @@ def f() -> None: d.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int" == -[case testNonePartialType] +[case testNonePartialType1] import a a.y From 2218c11e160464a9482078f2f0cfa280788f8cdd Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 21 Feb 2018 18:28:00 -0800 Subject: [PATCH 04/10] Clean up a lot of cruft --- mypy/build.py | 8 ----- mypy/server/update.py | 70 ++++++++++-------------------------------- mypy/test/testcheck.py | 1 - mypy/test/testmerge.py | 18 +++++------ 4 files changed, 25 insertions(+), 72 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 5c6ed710ceb9..a164f5fd8d84 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -389,7 +389,6 @@ def default_lib_path(data_dir: str, CacheMeta = NamedTuple('CacheMeta', [('id', str), ('path', str), - ('memory_only', bool), # no corresponding json files (fine-grained only) ('mtime', int), ('size', int), ('hash', str), @@ -415,7 +414,6 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta: return CacheMeta( meta.get('id', sentinel), meta.get('path', sentinel), - meta.get('memory_only', False), int(meta['mtime']) if 'mtime' in meta else sentinel, meta.get('size', sentinel), meta.get('hash', sentinel), @@ -1121,12 +1119,6 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str], manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id)) return None - if meta.memory_only: - # Special case for fine-grained incremental mode when the JSON file is missing but - # we want to cache the module anyway. - manager.log('Memory-only metadata for {}'.format(id)) - return meta - assert path is not None, "Internal error: meta was provided without a path" # Check data_json; assume if its mtime matches it's good. 
# TODO: stat() errors diff --git a/mypy/server/update.py b/mypy/server/update.py index e09174b91059..d76c462771b0 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -115,15 +115,14 @@ - Fully support multiple type checking passes - Use mypy.fscache to access file system -- Don't use load_graph() and update the import graph incrementally """ import os.path from typing import Dict, List, Set, Tuple, Iterable, Union, Optional, Mapping, NamedTuple from mypy.build import ( - BuildManager, State, BuildSource, Graph, load_graph, SavedCache, CacheMeta, - cache_meta_from_dict, find_module_clear_caches, DEBUG_FINE_GRAINED + BuildManager, State, BuildSource, Graph, load_graph, find_module_clear_caches, + DEBUG_FINE_GRAINED, ) from mypy.checker import DeferredNode from mypy.errors import Errors, CompileError @@ -172,7 +171,6 @@ def __init__(self, # this directly reflected in load_graph's interface. self.options.cache_dir = os.devnull manager.saved_cache = {} - self.type_maps = extract_type_maps(graph) # Active triggers during the last update self.triggered = [] # type: List[str] @@ -253,6 +251,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str], # TODO: If new module brings in other modules, we parse some files multiple times. manager = self.manager previous_modules = self.previous_modules + graph = self.graph # Record symbol table snaphot of old version the changed module. old_snapshots = {} # type: Dict[str, Dict[str, SnapshotItem]] @@ -261,14 +260,14 @@ def update_single(self, module: str, path: str) -> Tuple[List[str], old_snapshots[module] = snapshot manager.errors.reset() - result = update_single_isolated(module, path, manager, previous_modules, self.graph) + result = update_single_isolated(module, path, manager, previous_modules, graph) if isinstance(result, BlockedUpdate): # Blocking error -- just give up module, path, remaining, errors = result self.previous_modules = get_module_to_path_map(manager) return errors, remaining, (module, path), True assert isinstance(result, NormalUpdate) # Work around #4124 - module, path, remaining, tree, graph = result + module, path, remaining, tree = result # TODO: What to do with stale dependencies? triggered = calculate_active_triggers(manager, old_snapshots, {module: tree}) @@ -285,20 +284,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str], # Preserve state needed for the next update. self.previous_targets_with_errors = manager.errors.targets() - # If deleted, module won't be in the graph. - if module in graph: - # Generate metadata so that we can reuse the AST in the next run. - graph[module].write_cache() - for id, state in graph.items(): - # Look up missing ASTs from saved cache. 
- if state.tree is None and id in manager.saved_cache: - meta, tree, type_map = manager.saved_cache[id] - state.tree = tree self.previous_modules = get_module_to_path_map(manager) - self.type_maps = extract_type_maps(graph) - - # XXX: I want us to not need this - self.graph = graph return manager.errors.new_messages(), remaining, (module, path), False @@ -317,15 +303,13 @@ def get_all_dependencies(manager: BuildManager, graph: Dict[str, State], # - Id of the changed module (can be different from the module argument) # - Path of the changed module # - New AST for the changed module (None if module was deleted) -# - The entire updated build graph # - Remaining changed modules that are not processed yet as (module id, path) # tuples (non-empty if the original changed module imported other new # modules) NormalUpdate = NamedTuple('NormalUpdate', [('module', str), ('path', str), ('remaining', List[Tuple[str, str]]), - ('tree', Optional[MypyFile]), - ('graph', Graph)]) + ('tree', Optional[MypyFile])]) # The result of update_single_isolated when there is a blocking error. Items # are similar to NormalUpdate (but there are fewer). @@ -362,10 +346,11 @@ def update_single_isolated(module: str, old_modules = dict(manager.modules) sources = get_sources(previous_modules, [(module, path)]) - invalidate_stale_cache_entries(manager.saved_cache, graph, [(module, path)]) manager.missing_modules.clear() try: + if module in graph: + del graph[module] load_graph(sources, manager, graph) except CompileError as err: # Parse error somewhere in the program -- a blocker @@ -383,8 +368,8 @@ def update_single_isolated(module: str, return BlockedUpdate(err.module_with_blocker, path, remaining_modules, err.messages) if not os.path.isfile(path): - graph = delete_module(module, graph, manager) - return NormalUpdate(module, path, [], None, graph) + delete_module(module, graph, manager) + return NormalUpdate(module, path, [], None) # Find any other modules brought in by imports. 
changed_modules = get_all_changed_modules(module, path, previous_modules, graph) @@ -438,7 +423,7 @@ def update_single_isolated(module: str, graph[module] = state - return NormalUpdate(module, path, remaining_modules, state.tree, graph) + return NormalUpdate(module, path, remaining_modules, state.tree) def find_relative_leaf_module(modules: List[Tuple[str, str]], graph: Graph) -> Tuple[str, str]: @@ -475,14 +460,13 @@ def assert_equivalent_paths(path1: str, path2: str) -> None: def delete_module(module_id: str, - graph: Dict[str, State], - manager: BuildManager) -> Dict[str, State]: + graph: Graph, + manager: BuildManager) -> None: manager.log_fine_grained('delete module %r' % module_id) # TODO: Deletion of a package # TODO: Remove deps for the module (this only affects memory use, not correctness) - new_graph = graph.copy() - if module_id in new_graph: - del new_graph[module_id] + if module_id in graph: + del graph[module_id] if module_id in manager.modules: del manager.modules[module_id] if module_id in manager.saved_cache: @@ -496,7 +480,6 @@ def delete_module(module_id: str, parent = manager.modules[parent_id] if components[-1] in parent.names: del parent.names[components[-1]] - return new_graph def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]: @@ -518,15 +501,10 @@ def get_sources(modules: Dict[str, str], changed_modules: List[Tuple[str, str]]) -> List[BuildSource]: # TODO: Race condition when reading from the file system; we should only read each # bit of external state once during a build to have a consistent view of the world - items = sorted(modules.items(), key=lambda x: x[0]) - sources = [BuildSource(path, id, None) - for id, path in items - if os.path.isfile(path)] sources = [] for id, path in changed_modules: - if os.path.isfile(path):# and id not in modules: + if os.path.isfile(path): sources.append(BuildSource(path, id, None)) - # print(changed_modules, sources) return sources @@ -544,16 +522,6 @@ def get_all_changed_modules(root_module: str, return changed_modules -def invalidate_stale_cache_entries(cache: SavedCache, - graph: Graph, - changed_modules: List[Tuple[str, str]]) -> None: - for name, _ in changed_modules: - if name in cache: - del cache[name] - if name in graph: - del graph[name] - - def verify_dependencies(state: State, manager: BuildManager) -> None: """Report errors for import targets in module that don't exist.""" for dep in state.dependencies + state.suppressed: # TODO: ancestors? @@ -907,11 +875,5 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod return [DeferredNode(node, active_class_name, active_class)] -def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]: - # This is used to export information used only by the testmerge harness. 
- return {id: state.type_map() for id, state in graph.items() - if state.tree} - - def is_verbose(manager: BuildManager) -> bool: return manager.options.verbosity >= 1 or DEBUG_FINE_GRAINED diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 4cca36ef3503..0b1f1573760e 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -149,7 +149,6 @@ def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0) options = parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True options.show_traceback = True - options.verbosity = 1 if 'optional' in testcase.file: options.strict_optional = True if incremental_step: diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index cc79958d5c15..20dd7f9d6980 100644 --- a/mypy/test/testmerge.py +++ b/mypy/test/testmerge.py @@ -5,7 +5,7 @@ from typing import List, Tuple, Dict, Optional from mypy import build -from mypy.build import BuildManager, BuildSource, State +from mypy.build import BuildManager, BuildSource, State, Graph from mypy.errors import Errors, CompileError from mypy.nodes import ( Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, Var, UNBOUND_IMPORTED @@ -77,13 +77,13 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: target_path = os.path.join(test_temp_dir, 'target.py') shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), target_path) - a.extend(self.dump(manager, kind)) + a.extend(self.dump(fine_grained_manager, kind)) old_subexpr = get_subexpressions(manager.modules['target']) a.append('==>') new_file, new_types = self.build_increment(fine_grained_manager, 'target', target_path) - a.extend(self.dump(manager, kind)) + a.extend(self.dump(fine_grained_manager, kind)) for expr in old_subexpr: # Verify that old AST nodes are removed from the expression type map. @@ -119,13 +119,13 @@ def build_increment(self, manager: FineGrainedBuildManager, Dict[Expression, Type]]: manager.update([(module_id, path)]) module = manager.manager.modules[module_id] - type_map = manager.type_maps[module_id] + type_map = manager.graph[module_id].type_map() return module, type_map def dump(self, - manager: BuildManager, + manager: FineGrainedBuildManager, kind: str) -> List[str]: - modules = manager.modules + modules = manager.manager.modules if kind == AST: return self.dump_asts(modules) elif kind == TYPEINFO: @@ -203,14 +203,14 @@ def dump_typeinfo(self, info: TypeInfo) -> List[str]: type_str_conv=self.type_str_conv) return s.splitlines() - def dump_types(self, manager: BuildManager) -> List[str]: + def dump_types(self, manager: FineGrainedBuildManager) -> List[str]: a = [] # To make the results repeatable, we try to generate unique and # deterministic sort keys. - for module_id in sorted(manager.modules): + for module_id in sorted(manager.manager.modules): if not is_dumped_module(module_id): continue - type_map = manager.saved_cache[module_id][2] + type_map = manager.graph[module_id].type_map() if type_map: a.append('## {}'.format(module_id)) for expr in sorted(type_map, key=lambda n: (n.line, short_type(n), From 7520f89dc4022d0f55c4237d42180e6be3d92cd8 Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Tue, 27 Feb 2018 20:28:31 -0800 Subject: [PATCH 05/10] Fix the missing_modules related failures. This has been a bit of an adventure. Since we are no longer doing a full `load_graph()`, we can't rely on it fully populating `missing_modules` if it is cleared between updates. 
If `missing_modules` isn't fully populated, then semanal will spuriously reject `from x import y` where `x.y` is a missing module. We can't just *not* clear `missing_modules`, however, because `parse_file` uses it to decide that modules are suppressed. But this is actually wrong! It can lead to an import failure message not being generated because some other module has already failed to import it (and perhaps even ignored the error). `parse_file()` shouldn't actually *need* to compute `suppressed`, though. If it is called on a file with no cache, `load_graph()` will handle moving deps to `suppressed`, and if called on a file that has had cache info loaded, then the dependency information has all been loaded from the cache. So we refactor things so that dep information from the cache is used when available and `parse_file` doesn't need to deal with it. I strongly suspect that this refactor obviates the need for `fix_suppressed_dependencies`, but I was not able to quickly produce a test case from the description in #2036, so I am not comfortable making that change. --- mypy/build.py | 46 +++--- mypy/server/update.py | 10 +- test-data/unit/check-dmypy-fine-grained.test | 15 +- test-data/unit/check-incremental.test | 68 +++++++++ test-data/unit/check-modules.test | 12 ++ test-data/unit/fine-grained-modules.test | 142 ++++++++++++++++--- 6 files changed, 250 insertions(+), 43 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index a164f5fd8d84..a855ec390682 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -1660,7 +1660,7 @@ def __init__(self, else: # Parse the file (and then some) to get the dependencies. self.parse_file() - self.suppressed = [] + self.compute_dependencies() self.child_modules = set() def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None: @@ -1815,6 +1815,8 @@ def fix_suppressed_dependencies(self, graph: Graph) -> None: """ # TODO: See if it's possible to move this check directly into parse_file in some way. # TODO: Find a way to write a test case for this fix. + # TODO: I suspect that splitting compute_dependencies() out from parse_file + # obviates the need for this but lacking a test case for the problem this fixed... silent_mode = (self.options.ignore_missing_imports or self.options.follow_imports == 'skip') if not silent_mode: @@ -1881,31 +1883,35 @@ def parse_file(self) -> None: # TODO: Why can't SemanticAnalyzerPass1 .analyze() do this? self.tree.names = manager.semantic_analyzer.globals + for pri, id, line in manager.all_imported_modules_in_file(self.tree): + if id == '': + # Must be from a relative import. + manager.errors.set_file(self.xpath, self.id) + manager.errors.report(line, 0, + "No parent module -- cannot perform relative import", + blocker=True) + + self.check_blockers() + + def compute_dependencies(self): + """Compute a module's dependencies after parsing it. + + This is used when we parse a file that we didn't have + up-to-date cache information for. When we have an up-to-date + cache, we just use the cached info. + """ + manager = self.manager + # Compute (direct) dependencies. # Add all direct imports (this is why we needed the first pass). # Also keep track of each dependency's source line. 
dependencies = [] - suppressed = [] priorities = {} # type: Dict[str, int] # id -> priority dep_line_map = {} # type: Dict[str, int] # id -> line for pri, id, line in manager.all_imported_modules_in_file(self.tree): priorities[id] = min(pri, priorities.get(id, PRI_ALL)) if id == self.id: continue - # Omit missing modules, as otherwise we could not type-check - # programs with missing modules. - if id in manager.missing_modules: - if id not in dep_line_map: - suppressed.append(id) - dep_line_map[id] = line - continue - if id == '': - # Must be from a relative import. - manager.errors.set_file(self.xpath, self.id) - manager.errors.report(line, 0, - "No parent module -- cannot perform relative import", - blocker=True) - continue if id not in dep_line_map: dependencies.append(id) dep_line_map[id] = line @@ -1913,17 +1919,17 @@ def parse_file(self) -> None: if self.id != 'builtins' and 'builtins' not in dep_line_map: dependencies.append('builtins') - # If self.dependencies is already set, it was read from the - # cache, but for some reason we're re-parsing the file. # NOTE: What to do about race conditions (like editing the # file while mypy runs)? A previous version of this code # explicitly checked for this, but ran afoul of other reasons # for differences (e.g. silent mode). + + # Missing dependencies will be moved from dependencies to + # suppressed when they fail to be loaded in load_graph. self.dependencies = dependencies - self.suppressed = suppressed + self.suppressed = [] self.priorities = priorities self.dep_line_map = dep_line_map - self.check_blockers() def semantic_analysis(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" diff --git a/mypy/server/update.py b/mypy/server/update.py index d76c462771b0..93ff54537339 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -122,7 +122,7 @@ from mypy.build import ( BuildManager, State, BuildSource, Graph, load_graph, find_module_clear_caches, - DEBUG_FINE_GRAINED, + PRI_INDIRECT, DEBUG_FINE_GRAINED, ) from mypy.checker import DeferredNode from mypy.errors import Errors, CompileError @@ -347,7 +347,9 @@ def update_single_isolated(module: str, old_modules = dict(manager.modules) sources = get_sources(previous_modules, [(module, path)]) - manager.missing_modules.clear() + if module in manager.missing_modules: + manager.missing_modules.remove(module) + try: if module in graph: del graph[module] @@ -524,7 +526,9 @@ def get_all_changed_modules(root_module: str, def verify_dependencies(state: State, manager: BuildManager) -> None: """Report errors for import targets in module that don't exist.""" - for dep in state.dependencies + state.suppressed: # TODO: ancestors? + # Strip out indirect dependencies. See comment in build.load_graph(). + dependencies = [dep for dep in state.dependencies if state.priorities.get(dep) != PRI_INDIRECT] + for dep in dependencies + state.suppressed: # TODO: ancestors? 
if dep not in manager.modules and not manager.options.ignore_missing_imports: assert state.tree line = find_import_line(state.tree, dep) or 1 diff --git a/test-data/unit/check-dmypy-fine-grained.test b/test-data/unit/check-dmypy-fine-grained.test index 0544c9b700a7..0841085cdf7c 100644 --- a/test-data/unit/check-dmypy-fine-grained.test +++ b/test-data/unit/check-dmypy-fine-grained.test @@ -32,7 +32,7 @@ def f() -> str: [out2] tmp/b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") -[case testAddFileFineGrainedIncremental] +[case testAddFileFineGrainedIncremental1] # cmd: mypy -m a # cmd2: mypy -m a b [file a.py] @@ -46,6 +46,19 @@ tmp/a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-impor [out2] tmp/a.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" +[case testAddFileFineGrainedIncremental2] +# flags: --follow-imports=skip --ignore-missing-imports +# cmd: mypy -m a +# cmd2: mypy -m a b +[file a.py] +import b +b.f(1) +[file b.py.2] +def f(x: str) -> None: pass +[out1] +[out2] +tmp/a.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" + [case testDeleteFileFineGrainedIncremental] # cmd: mypy -m a b # cmd2: mypy -m a diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 599722e993f9..03582bad4e0e 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -4059,3 +4059,71 @@ class Baz: [out] [out2] tmp/a.py:3: error: Unsupported operand types for + ("int" and "str") + +[case testIncrementalWithIgnoresTwice] +import a +[file a.py] +import b +import foo # type: ignore +[file b.py] +x = 1 +[file b.py.2] +x = 'hi' +[file b.py.3] +x = 1 +[builtins fixtures/module.pyi] +[out] +[out2] +[out3] + +[case testIgnoredImport2] +import x +[file y.py] +import xyz # type: ignore +B = 0 +from x import A +[file x.py] +A = 0 +from y import B +[file x.py.2] +A = 1 +from y import B +[file x.py.3] +A = 2 +from y import B +[out] +[out2] +[out3] + +[case testDeletionOfSubmoduleTriggersImportFrom2] +from p.q import f +f() +[file p/__init__.py] +[file p/q.py] +def f() -> None: pass +[delete p/q.py.2] +[file p/q.py.3] +def f(x: int) -> None: pass +[out] +[out2] +main:1: error: Cannot find module named 'p.q' +main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) +[out3] +main:2: error: Too few arguments for "f" + +[case testDeleteIndirectDependency] +import b +b.x.foo() +[file b.py] +import c +x = c.Foo() +[file c.py] +class Foo: + def foo(self) -> None: pass +[delete c.py.2] +[file b.py.2] +class Foo: + def foo(self) -> None: pass +x = Foo() +[out] +[out2] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index d8492dad0436..06e926c7711c 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1943,3 +1943,15 @@ main:2: error: Name 'name' is not defined main:3: error: Revealed type is 'Any' [builtins fixtures/module.pyi] + +[case testFailedImportFromTwoModules] +import c +import b +[file b.py] +import c + +[out] +-- TODO: it would be better for this to be in the other order +tmp/b.py:1: error: Cannot find module named 'c' +main:1: error: Cannot find module named 'c' +main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index 44a48451b0ae..fbec7e0bcb2c 100644 --- 
a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -56,7 +56,7 @@ b.py:1: error: Cannot find module named 'a' b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == -[case testAddFileFixesAndGeneratesError] +[case testAddFileFixesAndGeneratesError1] import b [file b.py] [file b.py.2] @@ -76,6 +76,38 @@ b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" == b.py:2: error: Too many arguments for "f" +[case testAddFileFixesAndGeneratesError2] +import b +[file b.py] +[file b.py.2] +from a import f +f(1) +[file c.py.3] +x = 'whatever' +[file a.py.4] +def f() -> None: pass +[out] +== +b.py:1: error: Cannot find module named 'a' +b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) +== +b.py:1: error: Cannot find module named 'a' +b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) +== +b.py:2: error: Too many arguments for "f" + +[case testAddFileGeneratesError1] +# flags: --ignore-missing-imports +import a +[file a.py] +from b import f +f(1) +[file b.py.2] +def f() -> None: pass +[out] +== +a.py:2: error: Too many arguments for "f" + [case testAddFilePreservesError1] import b [file b.py] @@ -234,9 +266,7 @@ main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == -[case testDeletionOfSubmoduleTriggersImportFrom1-skip-cache] --- Different cache/no-cache tests because: --- missing module error message mismatch +[case testDeletionOfSubmoduleTriggersImportFrom1] from p import q [file p/__init__.py] [file p/q.py] @@ -250,20 +280,6 @@ main:1: error: Cannot find module named 'p.q' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == -[case testDeletionOfSubmoduleTriggersImportFrom1-skip-nocache] --- Different cache/no-cache tests because: --- missing module error message mismatch -from p import q -[file p/__init__.py] -[file p/q.py] -[delete p/q.py.2] -[file p/q.py.3] -[out] -== -main:1: error: Module 'p' has no attribute 'q' -== - - [case testDeletionOfSubmoduleTriggersImportFrom2] from p.q import f f() @@ -637,7 +653,6 @@ main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" == main:1: error: Cannot find module named 'p' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) -main:1: error: Cannot find module named 'p.a' [case testDeletePackage3] import p.a @@ -845,3 +860,92 @@ x = 1 [out] == main:2: error: Incompatible types in assignment (expression has type Module, variable has type "str") + +[case testRefreshImportOfIgnoredModule1] +# flags: --follow-imports=skip --ignore-missing-imports +# cmd: mypy c.py a/__init__.py b.py +[file c.py] +from a import a2 +import b +b.x +[file a/__init__.py] +[file b.py] +x = 0 +[file b.py.2] +x = '' +[file b.py.3] +x = 0 +[file a/a2.py] +[out] +== +== + +[case testRefreshImportOfIgnoredModule2] +# flags: --follow-imports=skip --ignore-missing-imports +# cmd: mypy c.py a/__init__.py b.py +[file c.py] +from a import a2 +import b +b.x +[file a/__init__.py] +[file b.py] +x = 0 +[file b.py.2] +x = '' +[file b.py.3] +x = 0 +[file a/a2/__init__.py] +[out] +== +== + +[case testIncrementalWithIgnoresTwice] +import a +[file a.py] +import b +import foo # type: ignore +[file b.py] +x = 1 +[file b.py.2] +x = 'hi' +[file b.py.3] +x = 1 +[out] +== +== + +[case testIgnoredImport2] 
+import x +[file y.py] +import xyz # type: ignore +B = 0 +from x import A +[file x.py] +A = 0 +from y import B +[file x.py.2] +A = 1 +from y import B +[file x.py.3] +A = 2 +from y import B +[out] +== +== + +[case testDeleteIndirectDependency] +import b +b.x.foo() +[file b.py] +import c +x = c.Foo() +[file c.py] +class Foo: + def foo(self) -> None: pass +[delete c.py.2] +[file b.py.2] +class Foo: + def foo(self) -> None: pass +x = Foo() +[out] +== From 95c32448bda8bf4b459ce64cdb20ca13add65ed4 Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Tue, 27 Feb 2018 22:03:16 -0800 Subject: [PATCH 06/10] Add a missing type annotation --- mypy/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mypy/build.py b/mypy/build.py index a855ec390682..456438d0e816 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -1893,7 +1893,7 @@ def parse_file(self) -> None: self.check_blockers() - def compute_dependencies(self): + def compute_dependencies(self) -> None: """Compute a module's dependencies after parsing it. This is used when we parse a file that we didn't have @@ -1901,6 +1901,7 @@ def compute_dependencies(self): cache, we just use the cached info. """ manager = self.manager + assert self.tree is not None # Compute (direct) dependencies. # Add all direct imports (this is why we needed the first pass). From 934588bc1f3e15fcb4f67cab8e971a4f23f868eb Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 28 Feb 2018 10:04:09 -0800 Subject: [PATCH 07/10] Revert "Remove the all_are_submodules handling" This reverts commit c21e21653e4a6da008795cb056851b767f1af9ce. Also fix tests, since this reintroduces a cache/no-cache divergence. --- mypy/build.py | 14 +++++++++++--- test-data/unit/fine-grained-modules.test | 18 +++++++++++++++++- 2 files changed, 28 insertions(+), 4 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 456438d0e816..98c1ba8e89c3 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -656,15 +656,23 @@ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str: elif isinstance(imp, ImportFrom): cur_id = correct_rel_imp(imp) pos = len(res) + all_are_submodules = True # Also add any imported names that are submodules. pri = import_priority(imp, PRI_MED) for name, __ in imp.names: sub_id = cur_id + '.' + name if self.is_module(sub_id): res.append((pri, sub_id, imp.line)) - - pri = import_priority(imp, PRI_HIGH) - res.insert(pos, ((pri, cur_id, imp.line))) + else: + all_are_submodules = False + # If all imported names are submodules, don't add + # cur_id as a dependency. Otherwise (i.e., if at + # least one imported name isn't a submodule) + # cur_id is also a dependency, and we should + # insert it *before* any submodules. 
+ if not all_are_submodules: + pri = import_priority(imp, PRI_HIGH) + res.insert(pos, ((pri, cur_id, imp.line))) elif isinstance(imp, ImportAll): pri = import_priority(imp, PRI_HIGH) res.append((pri, correct_rel_imp(imp), imp.line)) diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index fbec7e0bcb2c..727ac060ec04 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -266,7 +266,9 @@ main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == -[case testDeletionOfSubmoduleTriggersImportFrom1] +[case testDeletionOfSubmoduleTriggersImportFrom1-skip-cache] +-- Different cache/no-cache tests because: +-- missing module error message mismatch from p import q [file p/__init__.py] [file p/q.py] @@ -280,6 +282,20 @@ main:1: error: Cannot find module named 'p.q' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == +[case testDeletionOfSubmoduleTriggersImportFrom1-skip-nocache] +-- Different cache/no-cache tests because: +-- missing module error message mismatch +from p import q +[file p/__init__.py] +[file p/q.py] +[delete p/q.py.2] +[file p/q.py.3] +[out] +== +main:1: error: Cannot find module named 'p.q' +main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) +== + [case testDeletionOfSubmoduleTriggersImportFrom2] from p.q import f f() From f4e55f4828a11ade956155596bd0c0cde1614d93 Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 28 Feb 2018 11:14:56 -0800 Subject: [PATCH 08/10] Various cleanups --- mypy/build.py | 20 +++++++++++--------- test-data/unit/fine-grained-modules.test | 6 ++++++ 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 98c1ba8e89c3..c323e43ff162 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -567,7 +567,7 @@ class BuildManager: plugin: Active mypy plugin(s) errors: Used for reporting all errors flush_errors: A function for processing errors after each SCC - saved_cache: Dict with saved cache state for dmypy and fine-grained incremental mode + saved_cache: Dict with saved cache state for coarse-grained dmypy (read-write!) stats: Dict with various instrumentation numbers """ @@ -624,6 +624,8 @@ def all_imported_modules_in_file(self, Return list of tuples (priority, module id, import line number) for all modules imported in file; lower numbers == higher priority. + + Can generate blocking errors on bogus relative imports. """ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str: @@ -638,6 +640,12 @@ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str: file_id = ".".join(file_id.split(".")[:-rel]) new_id = file_id + "." + imp.id if imp.id else file_id + if not new_id: + self.errors.set_file(file.path, file.name()) + self.errors.report(imp.line, 0, + "No parent module -- cannot perform relative import", + blocker=True) + return new_id res = [] # type: List[Tuple[int, str, int]] @@ -1891,14 +1899,6 @@ def parse_file(self) -> None: # TODO: Why can't SemanticAnalyzerPass1 .analyze() do this? self.tree.names = manager.semantic_analyzer.globals - for pri, id, line in manager.all_imported_modules_in_file(self.tree): - if id == '': - # Must be from a relative import. 
- manager.errors.set_file(self.xpath, self.id) - manager.errors.report(line, 0, - "No parent module -- cannot perform relative import", - blocker=True) - self.check_blockers() def compute_dependencies(self) -> None: @@ -1940,6 +1940,8 @@ def compute_dependencies(self) -> None: self.priorities = priorities self.dep_line_map = dep_line_map + self.check_blockers() # Can fail due to bogus relative imports + def semantic_analysis(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" patches = [] # type: List[Tuple[int, Callable[[], None]]] diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index 727ac060ec04..c765af39553c 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -963,5 +963,11 @@ class Foo: class Foo: def foo(self) -> None: pass x = Foo() +[file b.py.3] +class Foo: + def foo(self, x: int) -> None: pass +x = Foo() [out] == +== +main:2: error: Too few arguments for "foo" of "Foo" From 8322c8c04f3b5e14382e6aa7034fa7aee33084bf Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 28 Feb 2018 12:20:21 -0800 Subject: [PATCH 09/10] More clearing and asserts in the typechecker reset --- mypy/checker.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/mypy/checker.py b/mypy/checker.py index 77b8e6240b64..e58aece05747 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -222,9 +222,17 @@ def reset(self) -> None: This allows us to reuse TypeChecker objects in fine-grained incremental mode. """ + # TODO: verify this is still actually worth it over creating new checkers self.partial_reported.clear() + self.module_refs.clear() + self.binder = ConditionalTypeBinder() + self.type_map.clear() + + assert self.inferred_attribute_types is None assert self.partial_types == [] assert self.deferred_nodes == [] + assert len(self.scope.stack) == 1 + assert self.partial_types == [] def check_first_pass(self) -> None: """Type check the entire file, but defer functions with unresolved references. From d47920de182c531ed9d8ccff5553c0ee1c38ea0b Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 28 Feb 2018 16:18:36 -0800 Subject: [PATCH 10/10] Minor cleanups --- mypy/build.py | 3 +++ mypy/dmypy_server.py | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/mypy/build.py b/mypy/build.py index c323e43ff162..36cec9e86b39 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2199,6 +2199,9 @@ def load_graph(sources: List[BuildSource], manager: BuildManager, old_graph: Optional[Graph] = None) -> Graph: """Given some source files, load the full dependency graph. + If an old_graph is passed in, it is used as the starting point and + modified during graph loading. + As this may need to parse files, this can raise CompileError in case there are syntax errors. """ diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index c423053d2e9e..a26a256fb011 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -285,8 +285,9 @@ def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict for state in self.fine_grained_manager.graph.values(): meta = state.meta if meta is None: continue + assert state.path is not None self.fswatcher.set_file_data( - state.xpath, + state.path, FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash)) # Run an update
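Taken together, the series leaves fine-grained mode with a single long-lived graph: `FineGrainedBuildManager` stores it at construction, each update evicts the changed module's `State` and calls `load_graph()` to repopulate it, and dmypy seeds its file watcher from each state's cache metadata rather than from `saved_cache`. The following is a simplified sketch of the resulting update flow, using toy types rather than the real `update_single_isolated` machinery.

import os
from typing import Dict, List, Tuple

class ToyState:
    """Toy stand-in for mypy.build.State: one module in the graph."""
    def __init__(self, module_id: str, path: str) -> None:
        self.id = module_id
        self.path = path

Graph = Dict[str, ToyState]

def load_graph(sources: List[Tuple[str, str]], graph: Graph) -> Graph:
    # As in the real load_graph(), extend the caller's graph in place.
    for module_id, path in sources:
        if module_id not in graph:
            graph[module_id] = ToyState(module_id, path)
    return graph

def update_single_isolated(module: str, path: str, graph: Graph) -> None:
    # Drop the stale entry first so load_graph() re-parses the module;
    # every other entry in the graph keeps its ASTs and type maps.
    if module in graph:
        del graph[module]
    if not os.path.isfile(path):
        return  # module was deleted: the eviction above is the update
    load_graph([(module, path)], graph)

Reusing one graph is also why `TypeChecker.reset()` appears in this series: checker objects now live as long as their `State`, so per-run bookkeeping (the binder, the type map, reported partial types) has to be cleared explicitly between updates instead of being rebuilt from a fresh deserialization.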