diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py
index cb20305a72..8b770aed09 100644
--- a/pygments/lexers/configs.py
+++ b/pygments/lexers/configs.py
@@ -40,7 +40,7 @@ class IniLexer(RegexLexer):
             (r'\s+', Text),
             (r'[;#].*', Comment.Single),
             (r'\[.*?\]$', Keyword),
-            (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
+            (r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)',
              bygroups(Name.Attribute, Text, Operator, Text, String)),
             # standalone option, supported by some INI parsers
             (r'(.+?)$', Name.Attribute),
diff --git a/tests/test_ini_lexer.py b/tests/test_ini_lexer.py
new file mode 100644
index 0000000000..9959a9ac90
--- /dev/null
+++ b/tests/test_ini_lexer.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+"""
+    Pygments INI lexer tests
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+from pygments.lexers.configs import IniLexer
+from pygments.token import Token, String, Keyword, Name, Operator
+
+
+@pytest.fixture(scope='module')
+def lexer():
+    yield IniLexer()
+
+
+def test_indented_entries(lexer):
+    fragment = \
+        '[section]\n' \
+        '    key1=value1\n' \
+        '    key2=value2\n'
+    tokens = [
+        (Keyword, '[section]'),
+        (Token.Text, '\n    '),
+        (Name.Attribute, 'key1'),
+        (Operator, '='),
+        (String, 'value1'),
+        (Token.Text, '\n    '),
+        (Name.Attribute, 'key2'),
+        (Operator, '='),
+        (String, 'value2'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+    fragment = \
+        '[section]\n' \
+        '    key1 = value1\n' \
+        '    key2 = value2\n'
+    tokens = [
+        (Keyword, '[section]'),
+        (Token.Text, '\n    '),
+        (Name.Attribute, 'key1'),
+        (Token.Text, ' '),
+        (Operator, '='),
+        (Token.Text, ' '),
+        (String, 'value1'),
+        (Token.Text, '\n    '),
+        (Name.Attribute, 'key2'),
+        (Token.Text, ' '),
+        (Operator, '='),
+        (Token.Text, ' '),
+        (String, 'value2'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+    fragment = \
+        '[section]\n' \
+        '    key 1 = value 1\n' \
+        '    key 2 = value 2\n'
+    tokens = [
+        (Keyword, '[section]'),
+        (Token.Text, '\n    '),
+        (Name.Attribute, 'key 1'),
+        (Token.Text, ' '),
+        (Operator, '='),
+        (Token.Text, ' '),
+        (String, 'value 1'),
+        (Token.Text, '\n    '),
+        (Name.Attribute, 'key 2'),
+        (Token.Text, ' '),
+        (Operator, '='),
+        (Token.Text, ' '),
+        (String, 'value 2'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
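
A minimal sketch (commentary, not part of the patch) of the behavior the regex change targets: the old value pattern r'(.*(?:\n[ \t].+)*)' treated any indented line that followed as a continuation of the value, so an indented 'key2=value2' under a section header was folded into the previous String token. The new pattern r'([^\t\n]*)' stops at the end of the line, letting the r'\s+' rule absorb the indentation so each entry is tokenized on its own. The fragment below is illustrative only; IniLexer and get_tokens are Pygments' public API.

    from pygments.lexers.configs import IniLexer

    # Illustrative fragment: indented entries after a section header,
    # the case the patched rule is meant to handle.
    fragment = '[section]\n    key1=value1\n    key2=value2\n'

    for token_type, value in IniLexer().get_tokens(fragment):
        print(token_type, repr(value))

    # With the patched regex, 'key2' comes out as Token.Name.Attribute;
    # under the old multi-line value pattern it was swallowed into the
    # String token for 'value1' as a continuation line.

Note that the old pattern's (?:\n[ \t].+)* tail appears intended for multi-line continuation values; the patch trades that behavior for correct tokenization of indented entries, which is what the new test exercises.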