Skip to content

Commit

Permalink
support indented entries in IniLexer (#1624)
Browse files Browse the repository at this point in the history
  • Loading branch information
Leistungsabfall committed Jan 4, 2021
1 parent f8b097c commit 9b0218e
Show file tree
Hide file tree
Showing 2 changed files with 82 additions and 1 deletion.
2 changes: 1 addition & 1 deletion pygments/lexers/configs.py
Expand Up @@ -40,7 +40,7 @@ class IniLexer(RegexLexer):
(r'\s+', Text),
(r'[;#].*', Comment.Single),
(r'\[.*?\]$', Keyword),
(r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
(r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)',
bygroups(Name.Attribute, Text, Operator, Text, String)),
# standalone option, supported by some INI parsers
(r'(.+?)$', Name.Attribute),
Expand Down
81 changes: 81 additions & 0 deletions tests/test_ini_lexer.py
@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*-
"""
Pygments INI lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

import pytest
from pygments.lexers.configs import IniLexer
from pygments.token import Token, String, Keyword, Name, Operator


@pytest.fixture(scope='module')
def lexer():
    """Yield one shared IniLexer instance for all tests in this module."""
    ini_lexer = IniLexer()
    yield ini_lexer


def test_indented_entries(lexer):
    """Indented ``key=value`` entries must still be lexed as attributes (#1624)."""

    def assert_tokens(fragment, expected):
        # Compare the complete token stream produced for *fragment*.
        assert list(lexer.get_tokens(fragment)) == expected

    # Entries with no whitespace around '='.
    assert_tokens(
        '[section]\n'
        '    key1=value1\n'
        '    key2=value2\n',
        [
            (Keyword, '[section]'),
            (Token.Text, '\n    '),
            (Name.Attribute, 'key1'),
            (Operator, '='),
            (String, 'value1'),
            (Token.Text, '\n    '),
            (Name.Attribute, 'key2'),
            (Operator, '='),
            (String, 'value2'),
            (Token.Text, '\n'),
        ],
    )

    # Entries with whitespace around '='.
    assert_tokens(
        '[section]\n'
        '    key1 = value1\n'
        '    key2 = value2\n',
        [
            (Keyword, '[section]'),
            (Token.Text, '\n    '),
            (Name.Attribute, 'key1'),
            (Token.Text, ' '),
            (Operator, '='),
            (Token.Text, ' '),
            (String, 'value1'),
            (Token.Text, '\n    '),
            (Name.Attribute, 'key2'),
            (Token.Text, ' '),
            (Operator, '='),
            (Token.Text, ' '),
            (String, 'value2'),
            (Token.Text, '\n'),
        ],
    )

    # Keys and values that themselves contain spaces.
    assert_tokens(
        '[section]\n'
        '    key 1 = value 1\n'
        '    key 2 = value 2\n',
        [
            (Keyword, '[section]'),
            (Token.Text, '\n    '),
            (Name.Attribute, 'key 1'),
            (Token.Text, ' '),
            (Operator, '='),
            (Token.Text, ' '),
            (String, 'value 1'),
            (Token.Text, '\n    '),
            (Name.Attribute, 'key 2'),
            (Token.Text, ' '),
            (Operator, '='),
            (Token.Text, ' '),
            (String, 'value 2'),
            (Token.Text, '\n'),
        ],
    )

0 comments on commit 9b0218e

Please sign in to comment.