# test_lexer.rb — forked from whitequark/parser (148 lines, 114 loc, 3.93 KB)
# encoding: ascii-8bit
# frozen_string_literal: true
require 'helper'
require 'complex'
require 'parser/ruby-next/lexer'
# Lexer tests for the ruby-next extensions (method references `.:`,
# endless methods) layered on top of whitequark/parser's lexer.
class TestLexerNext < Minitest::Test
  # Build the lexer under test. "next" selects the ruby-next lexer
  # (pinned to lexer version 28); any other value is forwarded to the
  # stock Parser::Lexer constructor.
  def setup_lexer(version)
    @lex =
      if version == "next"
        Parser::Lexer::Next.new(28)
      else
        Parser::Lexer.new(version)
      end
    @lex.comments = []
    @lex.diagnostics = Parser::Diagnostic::Engine.new
    @lex.diagnostics.all_errors_are_fatal = true
    # @lex.diagnostics.consumer = lambda { |diag| $stderr.puts "", diag.render }
  end

  def setup
    setup_lexer 18
  end

  # Return a UTF-8 copy of +str+ (the file itself is ascii-8bit).
  def utf(str)
    str.dup.force_encoding(Encoding::UTF_8)
  end

  #
  # Additional matchers
  #

  # Asserts that lexing +s+ raises a syntax error.
  def refute_scanned(s, *args)
    assert_raises Parser::SyntaxError do
      assert_scanned(s, *args)
    end
  end

  # Lexes the escape sequence +input+ inside a double-quoted string and
  # asserts the resulting tSTRING value equals +expected+ (as binary).
  def assert_escape(expected, input)
    buffer = Parser::Source::Buffer.new('(assert_escape)')
    buffer.source = "\"\\#{input}\"".encode(input.encoding)

    @lex.reset
    @lex.source_buffer = buffer

    token, (value, *) = @lex.advance
    value.force_encoding(Encoding::BINARY)

    assert_equal [:tSTRING, expected], [token, value], buffer.source
  end

  # Asserts that the escape sequence +input+ is rejected with a fatal
  # diagnostic when it appears inside a %Q literal.
  def refute_escape(input)
    error = assert_raises Parser::SyntaxError do
      @lex.state = :expr_beg
      assert_scanned "%Q[\\#{input}]"
    end
    assert_equal :fatal, error.diagnostic.level
  end

  # Lexes "def <name> " and asserts +name+ is emitted as +type+ at
  # +range+ (offsets relative to the name, i.e. shifted past "def ").
  # Also checks the lexer lands in :expr_endfn.
  def assert_lex_fname(name, type, range)
    lo, hi = range
    assert_scanned("def #{name} ",
                   :kDEF, 'def', [0, 3],
                   type, name, [lo + 4, hi + 4])

    assert_equal :expr_endfn, @lex.state
  end

  # Lexes +input+ and checks each expected (token, value, [begin, end])
  # triple from +args+ in order, then asserts the token stream is
  # exhausted.
  def assert_scanned(input, *args)
    buffer = Parser::Source::Buffer.new('(assert_scanned)')
    buffer.source = input

    @lex.reset(false)
    @lex.source_buffer = buffer

    args.each_slice(3) do |token, value, (begin_pos, end_pos)|
      lex_token, (lex_value, lex_range) = @lex.advance

      assert lex_token, 'no more tokens'
      assert_operator [lex_token, lex_value], :eql?, [token, value], input
      assert_equal begin_pos, lex_range.begin_pos
      assert_equal end_pos, lex_range.end_pos
    end

    lex_token, (lex_value, *) = @lex.advance
    refute lex_token, "must be empty, but had #{[lex_token, lex_value].inspect}"
  end

  def test_meth_ref
    setup_lexer "next"

    assert_scanned('foo.:bar',
                   :tIDENTIFIER, 'foo', [0, 3],
                   :tMETHREF,    '.:',  [3, 5],
                   :tIDENTIFIER, 'bar', [5, 8])

    assert_scanned('foo .:bar',
                   :tIDENTIFIER, 'foo', [0, 3],
                   :tMETHREF,    '.:',  [4, 6],
                   :tIDENTIFIER, 'bar', [6, 9])
  end

  def test_meth_ref_unary_op
    setup_lexer "next"

    assert_scanned('foo.:+',
                   :tIDENTIFIER, 'foo', [0, 3],
                   :tMETHREF,    '.:',  [3, 5],
                   :tPLUS,       '+',   [5, 6])

    assert_scanned('foo.:-@',
                   :tIDENTIFIER, 'foo', [0, 3],
                   :tMETHREF,    '.:',  [3, 5],
                   :tUMINUS,     '-@',  [5, 7])
  end

  def test_meth_ref_unsupported_newlines
    setup_lexer "next"

    # MRI emits exactly the same sequence of tokens,
    # the error happens later in the parser
    assert_scanned('foo. :+',
                   :tIDENTIFIER, 'foo', [0, 3],
                   :tDOT,        '.',   [3, 4],
                   :tCOLON,      ':',   [5, 6],
                   :tUPLUS,      '+',   [6, 7])

    assert_scanned('foo.: +',
                   :tIDENTIFIER, 'foo', [0, 3],
                   :tDOT,        '.',   [3, 4],
                   :tCOLON,      ':',   [4, 5],
                   :tPLUS,       '+',   [6, 7])
  end

  def test_endless_method
    setup_lexer "next"

    assert_scanned('def foo = 42',
                   :kDEF,        "def", [0, 3],
                   :tIDENTIFIER, 'foo', [4, 7],
                   :tEQL,        "=",   [8, 9],
                   :tINTEGER,    42,    [10, 12])
  end
end