Commit 5b432df

Merge branch 'master' of https://github.com/felixhao28/pygments into felixhao28-master

Anteru committed Jan 4, 2021
2 parents: a590ac5 + 05effda
Showing 2 changed files with 25 additions and 3 deletions.
pygments/lexers/javascript.py: 2 changes (1 addition, 1 deletion)
@@ -509,7 +509,7 @@ class TypeScriptLexer(RegexLexer):
             (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
              bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
             # Match stuff like: function() {...}
-            (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
+            (r'([a-zA-Z_?.$][\w?.$]*)(?=\(\) \{)', Name.Other, 'slashstartsregex'),
             # Match stuff like: (function: return type)
             (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
              bygroups(Name.Other, Text, Keyword.Type)),
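For context on the change above: the old rule matched the identifier together with the trailing "() {", so a header like "main() {" came out as a single Name.Other token. The new lookahead (?=\(\) \{) matches only the identifier and leaves the parentheses and brace to be tokenized as Punctuation, which is what the test added below asserts. A minimal sketch for checking this by hand, assuming a Pygments install that includes this change:

# Minimal sketch: print the tokens Pygments produces for a TypeScript
# function header. Assumes the lookahead fix above is applied.
from pygments.lexers.javascript import TypeScriptLexer

lexer = TypeScriptLexer()
for token_type, value in lexer.get_tokens(u'async function main() {\n}'):
    print(token_type, repr(value))
# Expected (matching the new test below): 'main' is Name.Other, while
# '(', ')', '{' and '}' are emitted as separate Punctuation tokens.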
tests/test_javascript.py: 26 changes (24 additions, 2 deletions)
@@ -9,8 +9,8 @@

 import pytest

-from pygments.lexers.javascript import JavascriptLexer
-from pygments.token import Number
+from pygments.lexers.javascript import JavascriptLexer, TypeScriptLexer
+from pygments.token import Number, Token


 @pytest.fixture(scope='module')
@@ -82,3 +82,25 @@ def test_hexadecimal_literal_positive_matches(lexer, text):
 def test_hexadecimal_literals_negative_matches(lexer, text):
     """Test text that should **not** be tokenized as hexadecimal literals."""
     assert list(lexer.get_tokens(text))[0] != (Number.Hex, text)
+
+@pytest.fixture(scope='module')
+def ts_lexer():
+    yield TypeScriptLexer()
+
+def test_function_definition(ts_lexer):
+    fragment = u'async function main() {\n}'
+    tokens = [
+        (Token.Keyword, u'async'),
+        (Token.Text, u' '),
+        (Token.Keyword.Declaration, u'function'),
+        (Token.Text, u' '),
+        (Token.Name.Other, u'main'),
+        (Token.Punctuation, u'('),
+        (Token.Punctuation, u')'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(ts_lexer.get_tokens(fragment)) == tokens
