Don't mark a module as stale if its children change #8134

Merged
merged 2 commits on Dec 12, 2019
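In short, this change removes the child_modules bookkeeping from mypy's build cache, so State.is_fresh() no longer treats a package as stale merely because the set of its submodules changed; only the interface hash and the dependency list are compared. Below is a minimal, hypothetical sketch of the before/after freshness check, using simplified stand-ins rather than the real mypy.build classes:

# Hypothetical, simplified sketch of the freshness check touched by this PR.
# The real logic lives in mypy/build.py (State.is_fresh); the names below are
# stand-ins, not mypy's actual classes.
from typing import List, NamedTuple, Optional, Set


class FakeCacheMeta(NamedTuple):
    # Only the fields relevant to this sketch.
    dependencies: List[str]
    child_modules: List[str]


class FakeState:
    def __init__(self, meta: Optional[FakeCacheMeta],
                 dependencies: List[str], child_modules: Set[str],
                 interface_fresh: bool) -> None:
        self.meta = meta
        self.dependencies = dependencies
        self.child_modules = child_modules
        self.interface_fresh = interface_fresh

    def is_interface_fresh(self) -> bool:
        return self.interface_fresh

    def is_fresh_before(self) -> bool:
        # Old behaviour: a changed set of submodules made the package stale.
        return (self.meta is not None
                and self.is_interface_fresh()
                and self.dependencies == self.meta.dependencies
                and self.child_modules == set(self.meta.child_modules))

    def is_fresh_after(self) -> bool:
        # New behaviour: only the interface hash and dependency list matter.
        return (self.meta is not None
                and self.is_interface_fresh()
                and self.dependencies == self.meta.dependencies)


# A package whose submodule set grew (e.g. parent/b.py appeared) but whose
# own interface and dependencies are unchanged:
state = FakeState(meta=FakeCacheMeta(dependencies=[], child_modules=['parent.a']),
                  dependencies=[], child_modules={'parent.a', 'parent.b'},
                  interface_fresh=True)
print(state.is_fresh_before())  # False: marked stale just for the new child
print(state.is_fresh_after())   # True: stays fresh; the child is rechecked on its own

This matches the updated test expectations at the end of the diff: only parent.b is stale and only parent.a and parent.b are rechecked, while the parent package itself stays fresh.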
mypy/build.py (19 changes: 3 additions & 16 deletions)
@@ -280,7 +280,6 @@ def normpath(path: str, options: Options) -> str:
 ('data_mtime', int), # mtime of data_json
 ('data_json', str), # path of <id>.data.json
 ('suppressed', List[str]), # dependencies that weren't imported
-('child_modules', List[str]), # all submodules of the given module
 ('options', Optional[Dict[str, object]]), # build options
 # dep_prios and dep_lines are in parallel with
 # dependencies + suppressed.
@@ -317,7 +316,6 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
 int(meta['data_mtime']) if 'data_mtime' in meta else sentinel,
 data_json,
 meta.get('suppressed', []),
-meta.get('child_modules', []),
 meta.get('options'),
 meta.get('dep_prios', []),
 meta.get('dep_lines', []),
@@ -1320,7 +1318,6 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
 'data_mtime': meta.data_mtime,
 'dependencies': meta.dependencies,
 'suppressed': meta.suppressed,
-'child_modules': meta.child_modules,
 'options': (manager.options.clone_for_module(id)
 .select_options_affecting_cache()),
 'dep_prios': meta.dep_prios,
@@ -1364,7 +1361,7 @@ def json_dumps(obj: Any, debug_cache: bool) -> str:

 def write_cache(id: str, path: str, tree: MypyFile,
 dependencies: List[str], suppressed: List[str],
-child_modules: List[str], dep_prios: List[int], dep_lines: List[int],
+dep_prios: List[int], dep_lines: List[int],
 old_interface_hash: str, source_hash: str,
 ignore_all: bool, manager: BuildManager) -> Tuple[str, Optional[CacheMeta]]:
 """Write cache files for a module.
@@ -1379,7 +1376,6 @@ def write_cache(id: str, path: str, tree: MypyFile,
 tree: the fully checked module data
 dependencies: module IDs on which this module depends
 suppressed: module IDs which were suppressed as dependencies
-child_modules: module IDs which are this package's direct submodules
 dep_prios: priorities (parallel array to dependencies)
 dep_lines: import line locations (parallel array to dependencies)
 old_interface_hash: the hash from the previous version of the data cache file
@@ -1469,7 +1465,6 @@ def write_cache(id: str, path: str, tree: MypyFile,
 'data_mtime': data_mtime,
 'dependencies': dependencies,
 'suppressed': suppressed,
-'child_modules': child_modules,
 'options': options.select_options_affecting_cache(),
 'dep_prios': dep_prios,
 'dep_lines': dep_lines,
@@ -1688,9 +1683,6 @@ class State:
 # Parent package, its parent, etc.
 ancestors = None # type: Optional[List[str]]

-# A list of all direct submodules of a given module
-child_modules = None # type: Set[str]
-
 # List of (path, line number) tuples giving context for import
 import_context = None # type: List[Tuple[str, int]]

@@ -1797,7 +1789,6 @@ def __init__(self,
 assert len(all_deps) == len(self.meta.dep_lines)
 self.dep_line_map = {id: line
 for id, line in zip(all_deps, self.meta.dep_lines)}
-self.child_modules = set(self.meta.child_modules)
 if temporary:
 self.load_tree(temporary=True)
 if not manager.use_fine_grained_cache():
@@ -1824,7 +1815,6 @@ def __init__(self,
 # Parse the file (and then some) to get the dependencies.
 self.parse_file()
 self.compute_dependencies()
-self.child_modules = set()

 @property
 def xmeta(self) -> CacheMeta:
@@ -1855,8 +1845,7 @@ def is_fresh(self) -> bool:
 # dependency is added back we find out later in the process.
 return (self.meta is not None
 and self.is_interface_fresh()
-and self.dependencies == self.meta.dependencies
-and self.child_modules == set(self.meta.child_modules))
+and self.dependencies == self.meta.dependencies)

 def is_interface_fresh(self) -> bool:
 return self.externally_same
@@ -2241,7 +2230,7 @@ def write_cache(self) -> None:
 "Duplicates in dependencies list for {} ({})".format(self.id, self.dependencies))
 new_interface_hash, self.meta = write_cache(
 self.id, self.path, self.tree,
-list(self.dependencies), list(self.suppressed), list(self.child_modules),
+list(self.dependencies), list(self.suppressed),
 dep_prios, dep_lines, self.interface_hash, self.source_hash, self.ignore_all,
 self.manager)
 if new_interface_hash == self.interface_hash:
@@ -2795,8 +2784,6 @@ def load_graph(sources: List[BuildSource], manager: BuildManager,
 assert newst.id not in graph, newst.id
 graph[newst.id] = newst
 new.append(newst)
-if dep in st.ancestors and dep in graph:
-graph[dep].child_modules.add(st.id)
 if dep in graph and dep in st.suppressed_set:
 # Previously suppressed file is now visible
 st.add_dependency(dep)
test-data/unit/check-incremental.test (8 changes: 6 additions & 2 deletions)
@@ -947,11 +947,15 @@ from parent import a

 [file parent/a.py.2]
 from parent import b
+reveal_type(b.x)

 [file parent/b.py.2]
 x = 10

-[stale parent.a, parent.b]
-[rechecked parent, parent.a, parent.b]
+[stale parent.b]
+[rechecked parent.a, parent.b]
+[out2]
+tmp/parent/a.py:2: note: Revealed type is 'builtins.int'
+
 [case testIncrementalReferenceExistingFileWithImportFrom]
 from parent import a, b