Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

consider trailing whitespace a part of the prompt, making copy/paste more straight forward #1645

Merged
merged 2 commits into from Dec 28, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion pygments/lexers/shell.py
Expand Up @@ -232,7 +232,7 @@ class BashSessionLexer(ShellSessionBaseLexer):
_innerLexerCls = BashLexer
_ps1rgx = re.compile(
r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)')
r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
_ps2 = '>'


Expand Down
33 changes: 28 additions & 5 deletions tests/test_shell.py
Expand Up @@ -142,9 +142,8 @@ def test_end_of_line_nums(lexer_bash):
def test_newline_in_echo(lexer_session):
fragment = '$ echo \\\nhi\nhi\n'
tokens = [
(Token.Text, ''),
(Token.Generic.Prompt, '$'),
(Token.Text, ' '),
(Token.Name.Builtin, ''),
(Token.Generic.Prompt, '$ '),
(Token.Name.Builtin, 'echo'),
(Token.Text, ' '),
(Token.Literal.String.Escape, '\\\n'),
Expand All @@ -155,6 +154,31 @@ def test_newline_in_echo(lexer_session):
assert list(lexer_session.get_tokens(fragment)) == tokens


def test_newline_in_ls(lexer_session):
    # A backslash-newline after `ls` continues the command line: the
    # continued `hi` stays part of the command, and only the following
    # plain line is lexed as Generic.Output.
    code = '$ ls \\\nhi\nhi\n'
    expected = [
        (Token.Text, ''),
        (Token.Generic.Prompt, '$ '),
        (Token.Text, 'ls'),
        (Token.Text, ' '),
        (Token.Literal.String.Escape, '\\\n'),
        (Token.Text, 'hi'),
        (Token.Text, '\n'),
        (Token.Generic.Output, 'hi\n'),
    ]
    actual = list(lexer_session.get_tokens(code))
    assert expected == actual


def test_comment_after_prompt(lexer_session):
    # A `#` immediately after the `$` prompt is lexed as a comment token
    # (an empty Comment.Single precedes the prompt in the stream).
    source = '$# comment'
    expected = [
        (Token.Comment.Single, ''),
        (Token.Generic.Prompt, '$'),
        (Token.Comment.Single, '# comment\n'),
    ]
    actual = list(lexer_session.get_tokens(source))
    assert actual == expected


def test_msdos_gt_only(lexer_msdos):
fragment = '> py\nhi\n'
tokens = [
Expand Down Expand Up @@ -217,8 +241,7 @@ def test_virtualenv(lexer_session):
(Token.Text, ''),
(Token.Text, ' '),
(Token.Text, ''),
(Token.Generic.Prompt, '[~/project]$'),
(Token.Text, ' '),
(Token.Generic.Prompt, '[~/project]$ '),
(Token.Text, 'foo'),
(Token.Text, ' '),
(Token.Text, '-h'),
Expand Down