Skip to content

Commit

Permalink
Merge github.com:mathiasertl/pygments
Browse files Browse the repository at this point in the history
fixes #1645
  • Loading branch information
birkenfeld committed Dec 28, 2020
2 parents a3d4100 + 456ac59 commit 00a31bc
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 5 deletions.
1 change: 1 addition & 0 deletions CHANGES
Expand Up @@ -22,6 +22,7 @@ Version 2.7.4
- Limit recursion with nesting Ruby heredocs (#1638)
- Fixed a few inefficient regexes for guessing lexers
- Fixed the raw token lexer handling of Unicode (#1616)
- Lex trailing whitespace as part of the prompt in shell lexers (#1645)

Thanks to Google's OSS-Fuzz project for finding many of these bugs.

Expand Down
2 changes: 1 addition & 1 deletion pygments/lexers/shell.py
Expand Up @@ -232,7 +232,7 @@ class BashSessionLexer(ShellSessionBaseLexer):
_innerLexerCls = BashLexer
_ps1rgx = re.compile(
r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)')
r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
_ps2 = '>'


Expand Down
29 changes: 25 additions & 4 deletions tests/test_shell.py
Expand Up @@ -143,8 +143,7 @@ def test_end_of_line_nums(lexer_bash):
def test_newline_in_echo(lexer_session):
fragment = '$ echo \\\nhi\nhi\n'
tokens = [
(Token.Generic.Prompt, '$'),
(Token.Text, ' '),
(Token.Generic.Prompt, '$ '),
(Token.Name.Builtin, 'echo'),
(Token.Text, ' '),
(Token.Literal.String.Escape, '\\\n'),
Expand All @@ -155,6 +154,29 @@ def test_newline_in_echo(lexer_session):
assert list(lexer_session.get_tokens(fragment)) == tokens


def test_newline_in_ls(lexer_session):
    """A backslash line continuation after ``ls`` is lexed as a string
    escape, and the prompt keeps its trailing space in one token."""
    code = '$ ls \\\nhi\nhi\n'
    expected = [
        (Token.Generic.Prompt, '$ '),
        (Token.Text, 'ls'),
        (Token.Text, ' '),
        (Token.Literal.String.Escape, '\\\n'),
        (Token.Text, 'hi'),
        (Token.Text, '\n'),
        (Token.Generic.Output, 'hi\n'),
    ]
    assert expected == list(lexer_session.get_tokens(code))


def test_comment_after_prompt(lexer_session):
    """Everything after a bare ``$`` prompt starting with ``#`` is lexed
    as a single-line comment (a trailing newline is added by get_tokens)."""
    code = '$# comment'
    expected = [
        (Token.Generic.Prompt, '$'),
        (Token.Comment.Single, '# comment\n'),
    ]
    assert expected == list(lexer_session.get_tokens(code))


def test_msdos_gt_only(lexer_msdos):
fragment = '> py\nhi\n'
tokens = [
Expand Down Expand Up @@ -208,8 +230,7 @@ def test_virtualenv(lexer_session):
tokens = [
(Token.Generic.Prompt.VirtualEnv, '(env)'),
(Token.Text, ' '),
(Token.Generic.Prompt, '[~/project]$'),
(Token.Text, ' '),
(Token.Generic.Prompt, '[~/project]$ '),
(Token.Text, 'foo'),
(Token.Text, ' '),
(Token.Text, '-h'),
Expand Down

0 comments on commit 00a31bc

Please sign in to comment.