diff options
| author | qiu-x <alex@alexslomka.xyz> | 2022-06-29 07:56:51 +0200 |
|---|---|---|
| committer | Felix Van der Jeugt <felix.vanderjeugt@posteo.net> | 2022-11-29 21:57:18 +0100 |
| commit | 8a420ecc4c1ed50111464ec66901bd983eaf2dbd (patch) | |
| tree | f31d2186cafaee6e7f18d32fe99144c3e8148c00 /lua/lexers/ruby.lua | |
| parent | 981b90a203484182feace48471fe2b53dae7676f (diff) | |
| download | vis-8a420ecc4c1ed50111464ec66901bd983eaf2dbd.tar.gz vis-8a420ecc4c1ed50111464ec66901bd983eaf2dbd.tar.xz | |
Resync the lexers with Scintillua
- Resync the lexers with Scintillua
- Update the lexer readme
- Update `zenburn` theme to fix some highlighting issues
- lexers: redirect print function to vis:info()
- Fix support for custom style names
- As per error message "lexer.delimited_range() is deprecated, use lexer.range()".
- Remove remaining `lexer.delimited_range()` call
- Set syntax to `nil` if the file type has no matching lexer
- Update Go lexer for Go 1.18.
- lexers/dsv: convert to new lexer format
(cherry picked from commit 9edbc3cd9ea1d7142b1305840432a3d2739e755a)
- lexers/gemini: disable legacy gemini lexer
This reverts commit 468f9ee1b027a7ce98b1a249fa1af5888feeb989.
It is in legacy format and of questionable quality. Ideally it
should be contributed upstream from where it will eventually
trickle down to us.
- lexers/git-rebase: convert to new lexer format
(cherry picked from commit 4000a4cc9ac4a4c2869dfae772b977a82aee8d8c)
- lexers/strace: convert to new lexer format
(cherry picked from commit e420451320d97eb164f5629c1bcfab0b595be29d)
- lexers/typescript: add new upstream lexer revision 28e2b60
(cherry picked from commit 7326e6deecdaa75fa94ae9ebdb653f9f907b33f2)
- use `package.searchpath` instead of a local `searchpath` function
- Restore `filetype: support filetype detection via hashbang`
- Remove redundant comment
- Restore gemini lexer
Diffstat (limited to 'lua/lexers/ruby.lua')
| -rw-r--r-- | lua/lexers/ruby.lua | 188 |
1 file changed, 84 insertions, 104 deletions
diff --git a/lua/lexers/ruby.lua b/lua/lexers/ruby.lua index ec712c1..f6ba415 100644 --- a/lua/lexers/ruby.lua +++ b/lua/lexers/ruby.lua @@ -1,148 +1,128 @@ --- Copyright 2006-2017 Mitchell mitchell.att.foicica.com. See LICENSE. +-- Copyright 2006-2022 Mitchell. See LICENSE. -- Ruby LPeg lexer. -local l = require('lexer') -local token, word_match = l.token, l.word_match -local P, R, S = lpeg.P, lpeg.R, lpeg.S +local lexer = require('lexer') +local token, word_match = lexer.token, lexer.word_match +local P, S = lpeg.P, lpeg.S -local M = {_NAME = 'ruby'} +local lex = lexer.new('ruby') -- Whitespace. -local ws = token(l.WHITESPACE, l.space^1) +lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) + +-- Keywords. +lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ + 'BEGIN', 'END', 'alias', 'and', 'begin', 'break', 'case', 'class', 'def', 'defined?', 'do', + 'else', 'elsif', 'end', 'ensure', 'false', 'for', 'if', 'in', 'module', 'next', 'nil', 'not', + 'or', 'redo', 'rescue', 'retry', 'return', 'self', 'super', 'then', 'true', 'undef', 'unless', + 'until', 'when', 'while', 'yield', '__FILE__', '__LINE__' +})) + +-- Functions. +lex:add_rule('function', token(lexer.FUNCTION, word_match{ + 'at_exit', 'autoload', 'binding', 'caller', 'catch', 'chop', 'chop!', 'chomp', 'chomp!', 'eval', + 'exec', 'exit', 'exit!', 'extend', 'fail', 'fork', 'format', 'gets', 'global_variables', 'gsub', + 'gsub!', 'include', 'iterator?', 'lambda', 'load', 'local_variables', 'loop', 'module_function', + 'open', 'p', 'print', 'printf', 'proc', 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', + 'require', 'require_relative', 'select', 'sleep', 'split', 'sprintf', 'srand', 'sub', 'sub!', + 'syscall', 'system', 'test', 'trace_var', 'trap', 'untrace_var' +}) * -S('.:|')) + +-- Identifiers. +local word_char = lexer.alnum + S('_!?') +local word = (lexer.alpha + '_') * word_char^0 +lex:add_rule('identifier', token(lexer.IDENTIFIER, word)) -- Comments. 
-local line_comment = '#' * l.nonnewline_esc^0 -local block_comment = l.starts_line('=begin') * (l.any - l.newline * '=end')^0 * - (l.newline * '=end')^-1 -local comment = token(l.COMMENT, block_comment + line_comment) +local line_comment = lexer.to_eol('#', true) +local block_comment = lexer.range(lexer.starts_line('=begin'), lexer.starts_line('=end')) +lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment)) +-- Strings. local delimiter_matches = {['('] = ')', ['['] = ']', ['{'] = '}'} -local literal_delimitted = P(function(input, index) +local literal_delimited = P(function(input, index) local delimiter = input:sub(index, index) if not delimiter:find('[%w\r\n\f\t ]') then -- only non alpha-numerics local match_pos, patt if delimiter_matches[delimiter] then -- Handle nested delimiter/matches in strings. local s, e = delimiter, delimiter_matches[delimiter] - patt = l.delimited_range(s..e, false, false, true) + patt = lexer.range(s, e, false, true, true) else - patt = l.delimited_range(delimiter) + patt = lexer.range(delimiter) end match_pos = lpeg.match(patt, input, index) return match_pos or #input + 1 end end) --- Strings. 
-local cmd_str = l.delimited_range('`') -local lit_cmd = '%x' * literal_delimitted -local lit_array = '%w' * literal_delimitted -local sq_str = l.delimited_range("'") -local dq_str = l.delimited_range('"') -local lit_str = '%' * S('qQ')^-1 * literal_delimitted +local cmd_str = lexer.range('`') +local lit_cmd = '%x' * literal_delimited +local lit_array = '%w' * literal_delimited +local sq_str = lexer.range("'") +local dq_str = lexer.range('"') +local lit_str = '%' * S('qQ')^-1 * literal_delimited local heredoc = '<<' * P(function(input, index) - local s, e, indented, _, delimiter = - input:find('([%-~]?)(["`]?)([%a_][%w_]*)%2[\n\r\f;]+', index) + local s, e, indented, _, delimiter = input:find('([%-~]?)(["`]?)([%a_][%w_]*)%2[\n\r\f;]+', index) if s == index and delimiter then local end_heredoc = (#indented > 0 and '[\n\r\f]+ *' or '[\n\r\f]+') - local _, e = input:find(end_heredoc..delimiter, e) + e = select(2, input:find(end_heredoc .. delimiter, e)) return e and e + 1 or #input + 1 end end) +local string = token(lexer.STRING, (sq_str + dq_str + lit_str + heredoc + cmd_str + lit_cmd + + lit_array) * S('f')^-1) -- TODO: regex_str fails with `obj.method /patt/` syntax. -local regex_str = #P('/') * l.last_char_includes('!%^&*([{-=+|:;,?<>~') * - l.delimited_range('/', true, false) * S('iomx')^0 -local lit_regex = '%r' * literal_delimitted * S('iomx')^0 -local string = token(l.STRING, (sq_str + dq_str + lit_str + heredoc + cmd_str + - lit_cmd + lit_array) * S('f')^-1) + - token(l.REGEX, regex_str + lit_regex) - -local word_char = l.alnum + S('_!?') +local regex_str = + #P('/') * lexer.last_char_includes('!%^&*([{-=+|:;,?<>~') * lexer.range('/', true) * S('iomx')^0 +local lit_regex = '%r' * literal_delimited * S('iomx')^0 +local regex = token(lexer.REGEX, regex_str + lit_regex) +lex:add_rule('string', string + regex) -- Numbers. 
-local dec = l.digit^1 * ('_' * l.digit^1)^0 * S('ri')^-1 -local bin = '0b' * S('01')^1 * ('_' * S('01')^1)^0 -local integer = S('+-')^-1 * (bin + l.hex_num + l.oct_num + dec) +local dec = lexer.digit^1 * ('_' * lexer.digit^1)^0 * S('ri')^-1 +local bin = '0b' * S('01')^1 * ('_' * S('01')^1)^0 * -lexer.xdigit +local integer = S('+-')^-1 * (bin + lexer.hex_num + lexer.oct_num + dec) -- TODO: meta, control, etc. for numeric_literal. -local numeric_literal = '?' * (l.any - l.space) * -word_char -local number = token(l.NUMBER, l.float * S('ri')^-1 + integer + numeric_literal) - --- Keywords. -local keyword = token(l.KEYWORD, word_match({ - 'BEGIN', 'END', 'alias', 'and', 'begin', 'break', 'case', 'class', 'def', - 'defined?', 'do', 'else', 'elsif', 'end', 'ensure', 'false', 'for', 'if', - 'in', 'module', 'next', 'nil', 'not', 'or', 'redo', 'rescue', 'retry', - 'return', 'self', 'super', 'then', 'true', 'undef', 'unless', 'until', 'when', - 'while', 'yield', '__FILE__', '__LINE__' -}, '?!')) - --- Functions. -local func = token(l.FUNCTION, word_match({ - 'at_exit', 'autoload', 'binding', 'caller', 'catch', 'chop', 'chop!', 'chomp', - 'chomp!', 'eval', 'exec', 'exit', 'exit!', 'extend', 'fail', 'fork', 'format', 'gets', - 'global_variables', 'gsub', 'gsub!', 'include', 'iterator?', 'lambda', 'load', - 'local_variables', 'loop', 'module_function', 'open', 'p', 'print', 'printf', 'proc', 'putc', - 'puts', 'raise', 'rand', 'readline', 'readlines', 'require', 'require_relative', 'select', - 'sleep', 'split', 'sprintf', 'srand', 'sub', 'sub!', 'syscall', 'system', - 'test', 'trace_var', 'trap', 'untrace_var' -}, '?!')) * -S('.:|') - --- Identifiers. -local word = (l.alpha + '_') * word_char^0 -local identifier = token(l.IDENTIFIER, word) +local numeric_literal = '?' * (lexer.any - lexer.space) * -word_char +lex:add_rule('number', token(lexer.NUMBER, lexer.float * S('ri')^-1 + integer + numeric_literal)) -- Variables. 
-local global_var = '$' * (word + S('!@L+`\'=~/\\,.;<>_*"$?:') + l.digit + '-' * - S('0FadiIKlpvw')) +local global_var = '$' * + (word + S('!@L+`\'=~/\\,.;<>_*"$?:') + lexer.digit + '-' * S('0FadiIKlpvw')) local class_var = '@@' * word local inst_var = '@' * word -local variable = token(l.VARIABLE, global_var + class_var + inst_var) +lex:add_rule('variable', token(lexer.VARIABLE, global_var + class_var + inst_var)) -- Symbols. -local symbol = token('symbol', ':' * P(function(input, index) +lex:add_rule('symbol', token('symbol', ':' * P(function(input, index) if input:sub(index - 2, index - 2) ~= ':' then return index end -end) * (word_char^1 + sq_str + dq_str)) +end) * (word_char^1 + sq_str + dq_str))) +lex:add_style('symbol', lexer.styles.constant) -- Operators. -local operator = token(l.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~')) - -M._rules = { - {'whitespace', ws}, - {'keyword', keyword}, - {'function', func}, - {'identifier', identifier}, - {'comment', comment}, - {'string', string}, - {'number', number}, - {'variable', variable}, - {'symbol', symbol}, - {'operator', operator}, -} - -M._tokenstyles = { - symbol = l.STYLE_CONSTANT -} +lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~'))) +-- Fold points. 
local function disambiguate(text, pos, line, s) - return line:sub(1, s - 1):match('^%s*$') and - not text:sub(1, pos - 1):match('\\[ \t]*\r?\n$') and 1 or 0 + return line:sub(1, s - 1):match('^%s*$') and not text:sub(1, pos - 1):match('\\[ \t]*\r?\n$') and + 1 or 0 end - -M._foldsymbols = { - _patterns = {'%l+', '[%(%)%[%]{}]', '=begin', '=end', '#'}, - [l.KEYWORD] = { - begin = 1, class = 1, def = 1, ['do'] = 1, ['for'] = 1, ['module'] = 1, - case = 1, - ['if'] = disambiguate, ['while'] = disambiguate, - ['unless'] = disambiguate, ['until'] = disambiguate, - ['end'] = -1 - }, - [l.OPERATOR] = { - ['('] = 1, [')'] = -1, ['['] = 1, [']'] = -1, ['{'] = 1, ['}'] = -1 - }, - [l.COMMENT] = { - ['=begin'] = 1, ['=end'] = -1, ['#'] = l.fold_line_comments('#') - } -} - -return M +lex:add_fold_point(lexer.KEYWORD, 'begin', 'end') +lex:add_fold_point(lexer.KEYWORD, 'class', 'end') +lex:add_fold_point(lexer.KEYWORD, 'def', 'end') +lex:add_fold_point(lexer.KEYWORD, 'do', 'end') +lex:add_fold_point(lexer.KEYWORD, 'for', 'end') +lex:add_fold_point(lexer.KEYWORD, 'module', 'end') +lex:add_fold_point(lexer.KEYWORD, 'case', 'end') +lex:add_fold_point(lexer.KEYWORD, 'if', disambiguate) +lex:add_fold_point(lexer.KEYWORD, 'while', disambiguate) +lex:add_fold_point(lexer.KEYWORD, 'unless', disambiguate) +lex:add_fold_point(lexer.KEYWORD, 'until', disambiguate) +lex:add_fold_point(lexer.OPERATOR, '(', ')') +lex:add_fold_point(lexer.OPERATOR, '[', ']') +lex:add_fold_point(lexer.OPERATOR, '{', '}') +lex:add_fold_point(lexer.COMMENT, '=begin', '=end') +lex:add_fold_point(lexer.COMMENT, lexer.fold_consecutive_lines('#')) + +return lex |
