@@ -170,6 +170,10 @@ def __init__(self,
         self.blocking_error = None  # type: Optional[Tuple[str, str]]
         # Modules that we haven't processed yet but that are known to be stale.
         self.stale = []  # type: List[Tuple[str, str]]
+        # Disable the cache so that load_graph doesn't try going back to disk
+        # for the cache. This is kind of a hack and it might be better to have
+        # this directly reflected in load_graph's interface.
+        self.options.cache_dir = os.devnull
         mark_all_meta_as_memory_only(graph, manager)
         manager.saved_cache = preserve_full_cache(graph, manager)
         self.type_maps = extract_type_maps(graph)
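
Aside: the os.devnull trick above works because a cache path rooted at the
null device can never name a real file, so every cache probe misses and
load_graph falls back to parsing. A minimal standalone illustration (not
mypy code):

    import os

    # os.devnull is '/dev/null' on POSIX and 'nul' on Windows; no real file
    # can live under it, so a cache metadata probe like this always fails.
    meta_path = os.path.join(os.devnull, 'pkg', 'mod.meta.json')
    assert not os.path.isfile(meta_path)
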
@@ -281,9 +285,10 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
             print('triggered:', sorted(filtered))
         self.triggered.extend(triggered | self.previous_targets_with_errors)
         collect_dependencies({module: tree}, self.deps, graph)
-        propagate_changes_using_dependencies(manager, graph, self.deps, triggered,
-                                             {module},
-                                             self.previous_targets_with_errors)
+        remaining += propagate_changes_using_dependencies(
+            manager, graph, self.deps, triggered,
+            {module},
+            self.previous_targets_with_errors)
 
         # Preserve state needed for the next update.
         self.previous_targets_with_errors = manager.errors.targets()
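
For context, `remaining` accumulates modules that could not be reprocessed in
place because only their cache skeletons were loaded (see the
propagate_changes_using_dependencies hunk below). A hypothetical driver loop,
with reprocess_fully standing in for whatever the caller uses to re-run a full
single-module update:

    # Hypothetical sketch, not part of this PR: reprocess_fully is a stand-in
    # for a full single-module update that may itself report further modules
    # that were only available as cache skeletons.
    remaining = [(module, path)]
    while remaining:
        next_id, next_path = remaining.pop()
        remaining.extend(reprocess_fully(next_id, next_path))
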
@@ -318,6 +323,7 @@ def mark_all_meta_as_memory_only(graph: Dict[str, State],
 def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
                          options: Options) -> Dict[str, Set[str]]:
     """Return the fine-grained dependency map for an entire build."""
+    # Deps for each module were computed during build() or loaded from the cache.
     deps = {}  # type: Dict[str, Set[str]]
     collect_dependencies(manager.modules, deps, graph)
     return deps
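
For orientation, the map's keys are triggers (fully qualified names wrapped in
angle brackets) and its values are the targets to recheck when a trigger
fires. A hypothetical example of the shape:

    from typing import Dict, Set

    # Hypothetical contents; real triggers and targets come from the build.
    deps = {
        '<mod.f>': {'mod.g', 'pkg.caller'},  # targets that use mod.f
        '<mod.A.x>': {'mod.A.meth'},         # targets that use attribute A.x
    }  # type: Dict[str, Set[str]]
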
@@ -374,7 +380,7 @@ def update_single_isolated(module: str,
     sources = get_sources(previous_modules, [(module, path)])
     invalidate_stale_cache_entries(manager.saved_cache, [(module, path)])
 
-    manager.missing_modules = set()
+    manager.missing_modules.clear()
     try:
         graph = load_graph(sources, manager)
     except CompileError as err:
@@ -441,6 +447,7 @@ def update_single_isolated(module: str,
     # Perform type checking.
     state.type_check_first_pass()
     state.type_check_second_pass()
+    state.compute_fine_grained_deps()
     state.finish_passes()
     # TODO: state.write_cache()?
     # TODO: state.mark_as_rechecked()?
@@ -492,7 +499,8 @@ def delete_module(module_id: str,
     # TODO: Remove deps for the module (this only affects memory use, not correctness)
     assert module_id not in graph
     new_graph = graph.copy()
-    del manager.modules[module_id]
+    if module_id in manager.modules:
+        del manager.modules[module_id]
     if module_id in manager.saved_cache:
         del manager.saved_cache[module_id]
     components = module_id.split('.')
@@ -654,7 +662,6 @@ def collect_dependencies(new_modules: Mapping[str, Optional[MypyFile]],
     for id, node in new_modules.items():
         if node is None:
             continue
-        graph[id].compute_fine_grained_deps()
         for trigger, targets in graph[id].fine_grained_deps.items():
             deps.setdefault(trigger, set()).update(targets)
 
@@ -711,9 +718,15 @@ def propagate_changes_using_dependencies(
         deps: Dict[str, Set[str]],
         triggered: Set[str],
         up_to_date_modules: Set[str],
-        targets_with_errors: Set[str]) -> None:
+        targets_with_errors: Set[str]) -> List[Tuple[str, str]]:
+    """Transitively recheck targets based on triggers and the dependency map.
+
+    Returns a list of (module id, path) tuples representing modules that
+    contain a target that needs to be reprocessed but that has not been parsed yet."""
+
     # TODO: Multiple type checking passes
     num_iter = 0
+    remaining_modules = []  # type: List[Tuple[str, str]]
 
     # Propagate changes until nothing visible has changed during the last
     # iteration.
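
The loop in the next hunk is a fixpoint computation: fire the current
triggers, recheck the targets they reach, and repeat with whatever new
triggers the rechecking produced. A self-contained toy of just that control
flow (recheck is a stand-in for reprocess_nodes; none of these names are from
the PR):

    from typing import Callable, Dict, Set

    def propagate(triggered, deps, recheck):
        # type: (Set[str], Dict[str, Set[str]], Callable[[Set[str]], Set[str]]) -> None
        while triggered:
            targets = set()  # type: Set[str]
            for trigger in triggered:
                targets |= deps.get(trigger, set())
            # recheck() returns the new triggers fired by reprocessing targets.
            triggered = recheck(targets)
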
@@ -737,7 +750,13 @@ def propagate_changes_using_dependencies(
         # TODO: Preserve order (set is not optimal)
         for id, nodes in sorted(todo.items(), key=lambda x: x[0]):
             assert id not in up_to_date_modules
-            triggered |= reprocess_nodes(manager, graph, id, nodes, deps)
+            if manager.modules[id].is_cache_skeleton:
+                # We have only loaded the cache for this file, not the actual file,
+                # so we can't access the nodes to reprocess.
+                # Add it to the queue of files that need to be processed fully.
+                remaining_modules.append((id, manager.modules[id].path))
+            else:
+                triggered |= reprocess_nodes(manager, graph, id, nodes, deps)
         # Changes elsewhere may require us to reprocess modules that were
         # previously considered up to date. For example, there may be a
         # dependency loop that loops back to an originally processed module.
@@ -746,6 +765,8 @@ def propagate_changes_using_dependencies(
     if DEBUG:
         print('triggered:', list(triggered))
 
+    return remaining_modules
+
 
 def find_targets_recursive(
         triggers: Set[str],
@@ -993,4 +1014,6 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNode]:
 
 
 def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]:
-    return {id: state.type_map() for id, state in graph.items()}
+    # This exports type information that is used only by the testmerge harness.
+    return {id: state.type_map() for id, state in graph.items()
+            if state.tree}