forked from python/mypy
-
Notifications
You must be signed in to change notification settings - Fork 0
/
testmerge.py
237 lines (204 loc) · 8.69 KB
/
testmerge.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
"""Test cases for AST merge (used for fine-grained incremental checking)"""
import os
import shutil
from typing import List, Tuple, Dict, Optional
from mypy import build
from mypy.build import BuildResult
from mypy.modulefinder import BuildSource
from mypy.errors import CompileError
from mypy.nodes import (
Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, Var, TypeVarExpr,
UNBOUND_IMPORTED
)
from mypy.server.subexpr import get_subexpressions
from mypy.server.update import FineGrainedBuildManager
from mypy.strconv import StrConv
from mypy.test.config import test_temp_dir
from mypy.test.data import DataDrivenTestCase, DataSuite
from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options
from mypy.types import TypeStrVisitor, Type
from mypy.util import short_type, IdMapper
# Which data structures to dump in a test case?  The test case *name suffix*
# selects one of these (see ASTMergeSuite.run_case).
SYMTABLE = 'SYMTABLE'
# NOTE: fixed a stray leading space (' TYPEINFO') so the value matches its
# name like every sibling constant.  Safe: the value is only ever compared
# against this same constant.
TYPEINFO = 'TYPEINFO'
TYPES = 'TYPES'
AST = 'AST'

# Library/support modules whose state is never dumped in test output:
# we don't support incremental checking of changes to these, and dumping
# them would make expected outputs enormous and brittle.
NOT_DUMPED_MODULES = (
    'builtins',
    'typing',
    'abc',
    'contextlib',
    'sys',
    'mypy_extensions',
    'typing_extensions',
    'enum',
)
class ASTMergeSuite(DataSuite):
    """Data-driven test suite for AST merge (fine-grained incremental checking).

    Each test case builds a program, rewrites the 'target' module from a
    prepared ``target.py.next`` file, performs a fine-grained update, and
    compares a dump of the chosen data structure (AST / symbol tables /
    TypeInfos / expression types) before and after the merge against the
    expected output.

    NOTE(review): the scraped source had all leading indentation stripped;
    indentation here is reconstructed — logic is unchanged statement for
    statement.
    """

    files = ['merge.test']

    def setup(self) -> None:
        """Create the shared string/type converters used by all dumps."""
        super().setup()
        self.str_conv = StrConv(show_ids=True)
        assert self.str_conv.id_mapper is not None
        # IdMapper assigns stable short ids to AST nodes so dumps can show
        # object identity (needed to verify nodes are merged, not recreated).
        self.id_mapper: IdMapper = self.str_conv.id_mapper
        self.type_str_conv = TypeStrVisitor(self.id_mapper)

    def run_case(self, testcase: DataDrivenTestCase) -> None:
        """Run one merge test case and compare dumps with expected output."""
        name = testcase.name
        # We use the test case name to decide which data structures to dump.
        # Dumping everything would result in very verbose test cases.
        if name.endswith('_symtable'):
            kind = SYMTABLE
        elif name.endswith('_typeinfo'):
            kind = TYPEINFO
        elif name.endswith('_types'):
            kind = TYPES
        else:
            kind = AST

        main_src = '\n'.join(testcase.input)
        result = self.build(main_src, testcase)
        assert result is not None, 'cases where CompileError occurred should not be run'
        result.manager.fscache.flush()
        fine_grained_manager = FineGrainedBuildManager(result)

        a = []
        if result.errors:
            a.extend(result.errors)

        # Swap in the "next" version of the target module before the update.
        target_path = os.path.join(test_temp_dir, 'target.py')
        shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), target_path)

        a.extend(self.dump(fine_grained_manager, kind))
        old_subexpr = get_subexpressions(result.manager.modules['target'])

        a.append('==>')

        new_file, new_types = self.build_increment(fine_grained_manager, 'target',
                                                   target_path)
        a.extend(self.dump(fine_grained_manager, kind))

        for expr in old_subexpr:
            if isinstance(expr, TypeVarExpr):
                # These are merged so we can't perform the check.
                continue
            # Verify that old AST nodes are removed from the expression type map.
            assert expr not in new_types

        if testcase.normalize_output:
            a = normalize_error_messages(a)

        assert_string_arrays_equal(
            testcase.output, a,
            f'Invalid output ({testcase.file}, line {testcase.line})')

    def build(self, source: str, testcase: DataDrivenTestCase) -> Optional[BuildResult]:
        """Perform the initial (step 1) build; return None on CompileError."""
        options = parse_options(source, testcase, incremental_step=1)
        options.incremental = True
        options.fine_grained_incremental = True
        options.use_builtins_fixtures = True
        # Export types so the _types dumps have something to show.
        options.export_types = True
        options.show_traceback = True
        main_path = os.path.join(test_temp_dir, 'main')
        with open(main_path, 'w', encoding='utf8') as f:
            f.write(source)
        try:
            result = build.build(sources=[BuildSource(main_path, None, None)],
                                 options=options,
                                 alt_lib_path=test_temp_dir)
        except CompileError:
            # TODO: Is it okay to return None?
            return None
        return result

    def build_increment(self, manager: FineGrainedBuildManager,
                        module_id: str, path: str) -> Tuple[MypyFile,
                                                            Dict[Expression, Type]]:
        """Apply one fine-grained update for *module_id* at *path*.

        Returns the (merged) module AST and its expression type map.
        """
        manager.flush_cache()
        manager.update([(module_id, path)], [])
        module = manager.manager.modules[module_id]
        type_map = manager.graph[module_id].type_map()
        return module, type_map

    def dump(self,
             manager: FineGrainedBuildManager,
             kind: str) -> List[str]:
        """Dispatch to the dump routine selected by *kind*."""
        modules = manager.manager.modules
        if kind == AST:
            return self.dump_asts(modules)
        elif kind == TYPEINFO:
            return self.dump_typeinfos(modules)
        elif kind == SYMTABLE:
            return self.dump_symbol_tables(modules)
        elif kind == TYPES:
            return self.dump_types(manager)
        assert False, f'Invalid kind {kind}'

    def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]:
        """Dump the full AST of every user module, sorted by module id."""
        a = []
        for m in sorted(modules):
            if m in NOT_DUMPED_MODULES:
                # We don't support incremental checking of changes to builtins, etc.
                continue
            s = modules[m].accept(self.str_conv)
            a.extend(s.splitlines())
        return a

    def dump_symbol_tables(self, modules: Dict[str, MypyFile]) -> List[str]:
        """Dump the module-level symbol table of every user module."""
        a = []
        for id in sorted(modules):
            if not is_dumped_module(id):
                # We don't support incremental checking of changes to builtins, etc.
                continue
            a.extend(self.dump_symbol_table(id, modules[id].names))
        return a

    def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> List[str]:
        """Dump one symbol table, skipping dunder names to reduce noise."""
        a = [f'{module_id}:']
        for name in sorted(symtable):
            if name.startswith('__'):
                continue
            a.append(f'  {name}: {self.format_symbol_table_node(symtable[name])}')
        return a

    def format_symbol_table_node(self, node: SymbolTableNode) -> str:
        """Render a symbol table entry as 'Kind<id>(type)' for dumps."""
        if node.node is None:
            if node.kind == UNBOUND_IMPORTED:
                return 'UNBOUND_IMPORTED'
            return 'None'
        if isinstance(node.node, Node):
            s = f'{str(type(node.node).__name__)}<{self.id_mapper.id(node.node)}>'
        else:
            s = f'? ({type(node.node)})'
        if (isinstance(node.node, Var) and node.node.type and
                not node.node.fullname.startswith('typing.')):
            typestr = self.format_type(node.node.type)
            s += f'({typestr})'
        return s

    def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]:
        """Dump every TypeInfo (class) defined in user modules."""
        a = []
        for id in sorted(modules):
            if not is_dumped_module(id):
                continue
            a.extend(self.dump_typeinfos_recursive(modules[id].names))
        return a

    def dump_typeinfos_recursive(self, names: SymbolTable) -> List[str]:
        """Dump TypeInfos in *names*, recursing into nested class scopes."""
        a = []
        for name, node in sorted(names.items(), key=lambda x: x[0]):
            if isinstance(node.node, TypeInfo):
                a.extend(self.dump_typeinfo(node.node))
                a.extend(self.dump_typeinfos_recursive(node.node.names))
        return a

    def dump_typeinfo(self, info: TypeInfo) -> List[str]:
        """Dump a single TypeInfo (skipping enum.Enum to avoid noise)."""
        if info.fullname == 'enum.Enum':
            # Avoid noise
            return []
        s = info.dump(str_conv=self.str_conv,
                      type_str_conv=self.type_str_conv)
        return s.splitlines()

    def dump_types(self, manager: FineGrainedBuildManager) -> List[str]:
        """Dump the inferred type of every subexpression per user module."""
        a = []
        # To make the results repeatable, we try to generate unique and
        # deterministic sort keys.
        for module_id in sorted(manager.manager.modules):
            if not is_dumped_module(module_id):
                continue
            all_types = manager.manager.all_types
            # Compute a module type map from the global type map
            tree = manager.graph[module_id].tree
            assert tree is not None
            type_map = {node: all_types[node]
                        for node in get_subexpressions(tree)
                        if node in all_types}
            if type_map:
                a.append(f'## {module_id}')
                for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
                                                            str(n) + str(type_map[n]))):
                    typ = type_map[expr]
                    a.append(f'{short_type(expr)}:{expr.line}: {self.format_type(typ)}')
        return a

    def format_type(self, typ: Type) -> str:
        """Render a mypy Type with id-annotated string output."""
        return typ.accept(self.type_str_conv)
def is_dumped_module(id: str) -> bool:
    """Return True if module *id* should appear in test dumps.

    Excludes library/support modules (NOT_DUMPED_MODULES) and private
    modules (leading underscore), with '__main__' explicitly allowed.
    Parameter name `id` shadows the builtin but is kept for interface
    compatibility with existing callers.
    """
    return id not in NOT_DUMPED_MODULES and (not id.startswith('_') or id == '__main__')