Add lexers.special.OutputLexer. #1836

Merged
merged 2 commits into from Jun 7, 2021
Changes from 1 commit
1 change: 1 addition & 0 deletions pygments/lexers/_mapping.py
@@ -334,6 +334,7 @@
     'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
     'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
     'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+    'OutputLexer': ('pygments.lexers.special', 'Text output', ('output',), ('*.txt',), ('text/plain',)),
     'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
     'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
     'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
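For readers unfamiliar with _mapping.py: each tuple records a lexer's module path, display name, aliases, filename patterns, and MIME types, and Pygments uses it to locate lexers without importing every module up front. Below is a minimal sketch, not part of this PR, of how the new entry resolves at runtime (it assumes Pygments 2.10 or later, where this entry exists):

    # Hypothetical snippet: resolve the new mapping entry at runtime.
    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('output')   # 'output' is the alias in the tuple
    assert type(lexer).__name__ == 'OutputLexer'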
19 changes: 17 additions & 2 deletions pygments/lexers/special.py
@@ -12,11 +12,11 @@
 import re
 
 from pygments.lexer import Lexer
-from pygments.token import Token, Error, Text
+from pygments.token import Token, Error, Text, Generic
 from pygments.util import get_choice_opt
 
 
-__all__ = ['TextLexer', 'RawTokenLexer']
+__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
 
 
 class TextLexer(Lexer):
@@ -36,6 +36,21 @@ def analyse_text(text):
         return TextLexer.priority
 
 
+class OutputLexer(Lexer):
+    """
+    Simple lexer that highlights everything as ``Token.Generic.Output``.
+
+    .. versionadded:: 2.10
+    """
+    name = 'Text output'
+    aliases = ['output']
+    filenames = ['*.txt']
+    mimetypes = ['text/plain']
+
+    def get_tokens_unprocessed(self, text):
+        yield 0, Generic.Output, text
+
+
 _ttype_cache = {}
 
 line_re = re.compile('.*?\n')
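A quick usage sketch, not part of the diff, showing what the new lexer produces; the snippet assumes Pygments 2.10 or later, where OutputLexer first shipped:

    # Hypothetical usage sketch of the OutputLexer added in this PR.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.special import OutputLexer

    text = "$ make test\nAll tests passed.\n"

    # The entire input comes back as a single Token.Generic.Output token:
    for ttype, value in OutputLexer().get_tokens(text):
        print(ttype, repr(value))

    # HtmlFormatter renders Generic.Output with the short CSS class "go",
    # so stylesheets can dim program output relative to highlighted source:
    print(highlight(text, OutputLexer(), HtmlFormatter()))

Yielding one Generic.Output token for the whole input keeps the lexer trivial while still letting styles present program output distinctly from source code.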