path: root/lua/lexers/gherkin.lua
Diffstat (limited to 'lua/lexers/gherkin.lua')
-rw-r--r--  lua/lexers/gherkin.lua  74
1 file changed, 25 insertions, 49 deletions
diff --git a/lua/lexers/gherkin.lua b/lua/lexers/gherkin.lua
index fa7d898..c876fe3 100644
--- a/lua/lexers/gherkin.lua
+++ b/lua/lexers/gherkin.lua
@@ -1,64 +1,40 @@
--- Copyright 2015-2017 Jason Schindler. See LICENSE.
+-- Copyright 2015-2022 Jason Schindler. See LICENSE.
-- Gherkin (https://github.com/cucumber/cucumber/wiki/Gherkin) LPeg lexer.
-local l = require('lexer')
-local token, word_match = l.token, l.word_match
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, S = lpeg.P, lpeg.S
-local M = {_NAME = 'gherkin'}
+local lex = lexer.new('gherkin', {fold_by_indentation = true})
-- Whitespace.
-local ws = token(l.WHITESPACE, l.space^1)
-
--- Tags.
-local tag = token('tag', '@' * l.word^0)
-
--- Comments.
-local comment = token(l.COMMENT, '#' * l.nonnewline^0)
-
--- Strings.
-local doc_str = '"""' * (l.any - '"""')^0 * P('"""')^-1
-local dq_str = l.delimited_range('"')
-
-local string = token(l.STRING, doc_str + dq_str)
-
--- Placeholders.
-local placeholder = token('placeholder', l.nested_pair('<', '>'))
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
-- Keywords.
-local keyword = token(l.KEYWORD, word_match{
- 'Given', 'When', 'Then', 'And', 'But'
-})
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match(
+ 'And Background But Examples Feature Given Outline Scenario Scenarios Then When')))
--- Identifiers.
-local identifier = token(l.KEYWORD, P('Scenario Outline') + word_match{
- 'Feature', 'Background', 'Scenario', 'Scenarios', 'Examples'
-})
+-- Strings.
+local doc_str = lexer.range('"""')
+local dq_str = lexer.range('"')
+lex:add_rule('string', token(lexer.STRING, doc_str + dq_str))
--- Examples.
-local example = token('example', '|' * l.nonnewline^0)
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#')))
-- Numbers.
-local number = token(l.NUMBER, l.float + l.integer)
+-- lex:add_rule('number', token(lexer.NUMBER, lexer.number))
-M._rules = {
- {'whitespace', ws},
- {'comment', comment},
- {'tag', tag},
- {'placeholder', placeholder},
- {'keyword', keyword},
- {'identifier', identifier},
- {'example', example},
- {'string', string},
- {'number', number}
-}
+-- Tags.
+lex:add_rule('tag', token('tag', '@' * lexer.word^0))
+lex:add_style('tag', lexer.styles.label)
-M._tokenstyles = {
- tag = l.STYLE_LABEL,
- placeholder = l.STYLE_NUMBER,
- example = l.STYLE_NUMBER
-}
+-- Placeholders.
+lex:add_rule('placeholder', token('placeholder', lexer.range('<', '>', false, false, true)))
+lex:add_style('placeholder', lexer.styles.variable)
-M._FOLDBYINDENTATION = true
+-- Examples.
+lex:add_rule('example', token('example', lexer.to_eol('|')))
+lex:add_style('example', lexer.styles.number)
-return M
+return lex
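
For anyone extending this lexer against the converted API, the pattern is the same add_rule/add_style pairing used throughout the new file. Below is a minimal sketch of re-enabling the number rule that the new version leaves commented out; every name (lexer.new, lex:add_rule, token, lexer.NUMBER, lexer.number) is taken from the diff itself, and whether Gherkin numbers deserve highlighting at all is an assumption, not something this commit decides.

  local lexer = require('lexer')
  local token = lexer.token

  -- Same constructor and options as the converted gherkin lexer above.
  local lex = lexer.new('gherkin', {fold_by_indentation = true})

  -- The rule the diff comments out: lexer.number covers the float and
  -- integer patterns the old code matched explicitly via l.float + l.integer.
  lex:add_rule('number', token(lexer.NUMBER, lexer.number))

  return lex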