From 8a420ecc4c1ed50111464ec66901bd983eaf2dbd Mon Sep 17 00:00:00 2001
From: qiu-x
Date: Wed, 29 Jun 2022 07:56:51 +0200
Subject: Resync the lexers with Scintillua

- Resync the lexers with Scintillua
- Update the lexer readme
- Update `zenburn` theme to fix some highlighting issues
- lexers: redirect print function to vis:info()
- Fix support for custom style names
- As per error message "lexer.delimited_range() is deprecated, use lexer.range()".
- Remove remaining `lexer.delimited_range()` call
- Set syntax to `nil` if the file type has no matching lexer
- Updated Go lexer for Go 1.18.
- lexers/dsv: convert to new lexer format
  (cherry picked from commit 9edbc3cd9ea1d7142b1305840432a3d2739e755a)
- lexers/gemini: disable legacy gemini lexer
  This reverts commit 468f9ee1b027a7ce98b1a249fa1af5888feeb989.
  It is in legacy format and of questionable quality. Ideally it should be
  contributed upstream from where it will eventually trickle down to us.
- lexers/git-rebase: convert to new lexer format
  (cherry picked from commit 4000a4cc9ac4a4c2869dfae772b977a82aee8d8c)
- lexers/strace: convert to new lexer format
  (cherry picked from commit e420451320d97eb164f5629c1bcfab0b595be29d)
- lexers/typescript: add new upstream lexer revision 28e2b60
  (cherry picked from commit 7326e6deecdaa75fa94ae9ebdb653f9f907b33f2)
- use `package.searchpath` instead of a local `searchpath` function
- Restore `filetype: support filetype detection via hashbang`
- Remove redundant comment
- Restore gemini lexer
---
 lua/lexers/lua.lua | 232 ++++++++++++++++++++++++-----------------------------
 1 file changed, 105 insertions(+), 127 deletions(-)

(limited to 'lua/lexers/lua.lua')

diff --git a/lua/lexers/lua.lua b/lua/lexers/lua.lua
index c2edf69..03c37e0 100644
--- a/lua/lexers/lua.lua
+++ b/lua/lexers/lua.lua
@@ -1,86 +1,69 @@
--- Copyright 2006-2017 Mitchell mitchell.att.foicica.com. See LICENSE.
+-- Copyright 2006-2022 Mitchell. See LICENSE.
 -- Lua LPeg lexer.
 -- Original written by Peter Odding, 2007/04/04.
 
-local l = require('lexer')
-local token, word_match = l.token, l.word_match
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local B, P, S = lpeg.B, lpeg.P, lpeg.S
 
-local M = {_NAME = 'lua'}
+local lex = lexer.new('lua')
 
 -- Whitespace.
-local ws = token(l.WHITESPACE, l.space^1)
-
-local longstring = lpeg.Cmt('[' * lpeg.C(P('=')^0) * '[',
-                            function(input, index, eq)
-                              local _, e = input:find(']'..eq..']', index, true)
-                              return (e or #input) + 1
-                            end)
-
--- Comments.
-local line_comment = '--' * l.nonnewline^0
-local block_comment = '--' * longstring
-local comment = token(l.COMMENT, block_comment + line_comment)
-
--- Strings.
-local sq_str = l.delimited_range("'")
-local dq_str = l.delimited_range('"')
-local string = token(l.STRING, sq_str + dq_str) +
-               token('longstring', longstring)
-
--- Numbers.
-local lua_integer = P('-')^-1 * (l.hex_num + l.dec_num)
-local number = token(l.NUMBER, l.float + lua_integer)
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
 
 -- Keywords.
-local keyword = token(l.KEYWORD, word_match{
-  'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function',
-  'goto', 'if', 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then',
-  'true', 'until', 'while'
-})
-
--- Functions.
-local func = token(l.FUNCTION, word_match{ - 'assert', 'collectgarbage', 'dofile', 'error', 'getmetatable', 'ipairs', - 'load', 'loadfile', 'next', 'pairs', 'pcall', 'print', 'rawequal', 'rawget', - 'rawset', 'require', 'select', 'setmetatable', 'tonumber', 'tostring', 'type', - 'xpcall', +lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ + 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if', 'in', 'local', + 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while', -- Added in 5.2. - 'rawlen' + 'goto' +})) + +-- Functions and deprecated functions. +local func = token(lexer.FUNCTION, word_match{ + 'assert', 'collectgarbage', 'dofile', 'error', 'getmetatable', 'ipairs', 'load', 'loadfile', + 'next', 'pairs', 'pcall', 'print', 'rawequal', 'rawget', 'rawset', 'require', 'select', + 'setmetatable', 'tonumber', 'tostring', 'type', 'xpcall', + -- Added in 5.2. + 'rawlen', + -- Added in 5.4. + 'warn' }) - --- Deprecated functions. local deprecated_func = token('deprecated_function', word_match{ -- Deprecated in 5.2. 'getfenv', 'loadstring', 'module', 'setfenv', 'unpack' }) +lex:add_rule('function', -B('.') * (func + deprecated_func)) +lex:add_style('deprecated_function', lexer.styles['function'] .. {italics = true}) -- Constants. -local constant = token(l.CONSTANT, word_match{ +lex:add_rule('constant', token(lexer.CONSTANT, -B('.') * word_match{ '_G', '_VERSION', -- Added in 5.2. '_ENV' -}) +})) --- Libraries. -local library = token('library', word_match({ +-- Libraries and deprecated libraries. +local library = token('library', word_match{ -- Coroutine. - 'coroutine', 'coroutine.create', 'coroutine.resume', 'coroutine.running', - 'coroutine.status', 'coroutine.wrap', 'coroutine.yield', + 'coroutine', 'coroutine.create', 'coroutine.resume', 'coroutine.running', 'coroutine.status', + 'coroutine.wrap', 'coroutine.yield', -- Coroutine added in 5.3. 'coroutine.isyieldable', + -- Coroutine added in 5.4. + 'coroutine.close', -- Module. - 'package', 'package.cpath', 'package.loaded', 'package.loadlib', - 'package.path', 'package.preload', + 'package', 'package.cpath', 'package.loaded', 'package.loadlib', 'package.path', + 'package.preload', -- Module added in 5.2. 'package.config', 'package.searchers', 'package.searchpath', -- UTF-8 added in 5.3. - 'utf8', 'utf8.char', 'utf8.charpattern', 'utf8.codepoint', 'utf8.codes', - 'utf8.len', 'utf8.offset', + 'utf8', 'utf8.char', 'utf8.charpattern', 'utf8.codepoint', 'utf8.codes', 'utf8.len', + 'utf8.offset', -- String. - 'string', 'string.byte', 'string.char', 'string.dump', 'string.find', - 'string.format', 'string.gmatch', 'string.gsub', 'string.len', 'string.lower', - 'string.match', 'string.rep', 'string.reverse', 'string.sub', 'string.upper', + 'string', 'string.byte', 'string.char', 'string.dump', 'string.find', 'string.format', + 'string.gmatch', 'string.gsub', 'string.len', 'string.lower', 'string.match', 'string.rep', + 'string.reverse', 'string.sub', 'string.upper', -- String added in 5.3. 'string.pack', 'string.packsize', 'string.unpack', -- Table. @@ -90,33 +73,26 @@ local library = token('library', word_match({ -- Table added in 5.3. 'table.move', -- Math. 
- 'math', 'math.abs', 'math.acos', 'math.asin', 'math.atan', 'math.ceil', - 'math.cos', 'math.deg', 'math.exp', 'math.floor', 'math.fmod', 'math.huge', - 'math.log', 'math.max', 'math.min', 'math.modf', 'math.pi', 'math.rad', - 'math.random', 'math.randomseed', 'math.sin', 'math.sqrt', 'math.tan', + 'math', 'math.abs', 'math.acos', 'math.asin', 'math.atan', 'math.ceil', 'math.cos', 'math.deg', + 'math.exp', 'math.floor', 'math.fmod', 'math.huge', 'math.log', 'math.max', 'math.min', + 'math.modf', 'math.pi', 'math.rad', 'math.random', 'math.randomseed', 'math.sin', 'math.sqrt', + 'math.tan', -- Math added in 5.3. - 'math.maxinteger', 'math.mininteger', 'math.tointeger', 'math.type', - 'math.ult', + 'math.maxinteger', 'math.mininteger', 'math.tointeger', 'math.type', 'math.ult', -- IO. - 'io', 'io.close', 'io.flush', 'io.input', 'io.lines', 'io.open', 'io.output', - 'io.popen', 'io.read', 'io.stderr', 'io.stdin', 'io.stdout', 'io.tmpfile', - 'io.type', 'io.write', + 'io', 'io.close', 'io.flush', 'io.input', 'io.lines', 'io.open', 'io.output', 'io.popen', + 'io.read', 'io.stderr', 'io.stdin', 'io.stdout', 'io.tmpfile', 'io.type', 'io.write', -- OS. - 'os', 'os.clock', 'os.date', 'os.difftime', 'os.execute', 'os.exit', - 'os.getenv', 'os.remove', 'os.rename', 'os.setlocale', 'os.time', - 'os.tmpname', + 'os', 'os.clock', 'os.date', 'os.difftime', 'os.execute', 'os.exit', 'os.getenv', 'os.remove', + 'os.rename', 'os.setlocale', 'os.time', 'os.tmpname', -- Debug. - 'debug', 'debug.debug', 'debug.gethook', 'debug.getinfo', 'debug.getlocal', - 'debug.getmetatable', 'debug.getregistry', 'debug.getupvalue', - 'debug.sethook', 'debug.setlocal', 'debug.setmetatable', 'debug.setupvalue', - 'debug.traceback', + 'debug', 'debug.debug', 'debug.gethook', 'debug.getinfo', 'debug.getlocal', 'debug.getmetatable', + 'debug.getregistry', 'debug.getupvalue', 'debug.sethook', 'debug.setlocal', 'debug.setmetatable', + 'debug.setupvalue', 'debug.traceback', -- Debug added in 5.2. - 'debug.getuservalue', 'debug.setuservalue', 'debug.upvalueid', - 'debug.upvaluejoin', -}, '.')) - --- Deprecated libraries. -local deprecated_library = token('deprecated_library', word_match({ + 'debug.getuservalue', 'debug.setuservalue', 'debug.upvalueid', 'debug.upvaluejoin' +}) +local deprecated_library = token('deprecated_library', word_match{ -- Module deprecated in 5.2. 'package.loaders', 'package.seeall', -- Table deprecated in 5.2. @@ -124,67 +100,69 @@ local deprecated_library = token('deprecated_library', word_match({ -- Math deprecated in 5.2. 'math.log10', -- Math deprecated in 5.3. - 'math.atan2', 'math.cosh', 'math.frexp', 'math.ldexp', 'math.pow', - 'math.sinh', 'math.tanh', + 'math.atan2', 'math.cosh', 'math.frexp', 'math.ldexp', 'math.pow', 'math.sinh', 'math.tanh', -- Bit32 deprecated in 5.3. - 'bit32', 'bit32.arshift', 'bit32.band', 'bit32.bnot', 'bit32.bor', - 'bit32.btest', 'bit32.extract', 'bit32.lrotate', 'bit32.lshift', - 'bit32.replace', 'bit32.rrotate', 'bit32.rshift', 'bit32.xor', + 'bit32', 'bit32.arshift', 'bit32.band', 'bit32.bnot', 'bit32.bor', 'bit32.btest', 'bit32.extract', + 'bit32.lrotate', 'bit32.lshift', 'bit32.replace', 'bit32.rrotate', 'bit32.rshift', 'bit32.xor', -- Debug deprecated in 5.2. 'debug.getfenv', 'debug.setfenv' -}, '.')) +}) +lex:add_rule('library', -B('.') * (library + deprecated_library)) +lex:add_style('library', lexer.styles.type) +lex:add_style('deprecated_library', lexer.styles.type .. {italics = true}) -- Identifiers. 
-local identifier = token(l.IDENTIFIER, l.word)
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = lexer.range("'")
+local dq_str = lexer.range('"')
+local longstring = lpeg.Cmt('[' * lpeg.C(P('=')^0) * '[', function(input, index, eq)
+  local _, e = input:find(']' .. eq .. ']', index, true)
+  return (e or #input) + 1
+end)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str) + token('longstring', longstring))
+lex:add_style('longstring', lexer.styles.string)
+
+-- Comments.
+local line_comment = lexer.to_eol('--')
+local block_comment = '--' * longstring
+lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment))
+
+-- Numbers.
+local lua_integer = P('-')^-1 * (lexer.hex_num + lexer.dec_num)
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lua_integer))
 
 -- Labels.
-local label = token(l.LABEL, '::' * l.word * '::')
+lex:add_rule('label', token(lexer.LABEL, '::' * lexer.word * '::'))
+
+-- Attributes.
+lex:add_rule('attribute', token('attribute', '<' * lexer.space^0 * word_match('const close') *
+  lexer.space^0 * '>'))
+lex:add_style('attribute', lexer.styles.class)
 
 -- Operators.
-local operator = token(l.OPERATOR, S('+-*/%^#=<>&|~;:,.{}[]()'))
-
-M._rules = {
-  {'whitespace', ws},
-  {'keyword', keyword},
-  {'function', func + deprecated_func},
-  {'constant', constant},
-  {'library', library + deprecated_library},
-  {'identifier', identifier},
-  {'string', string},
-  {'comment', comment},
-  {'number', number},
-  {'label', label},
-  {'operator', operator},
-}
-
-M._tokenstyles = {
-  longstring = l.STYLE_STRING,
-  deprecated_function = l.STYLE_FUNCTION..',italics',
-  library = l.STYLE_TYPE,
-  deprecated_library = l.STYLE_TYPE..',italics'
-}
-
-local function fold_longcomment(text, pos, line, s, match)
-  if match == '[' then
+lex:add_rule('operator', token(lexer.OPERATOR, '..' + S('+-*/%^#=<>&|~;:,.{}[]()')))
+
+-- Fold points.
+local function fold_longcomment(text, pos, line, s, symbol)
+  if symbol == '[' then
     if line:find('^%[=*%[', s) then return 1 end
-  elseif match == ']' then
+  elseif symbol == ']' then
    if line:find('^%]=*%]', s) then return -1 end
   end
   return 0
 end
-
-M._foldsymbols = {
-  _patterns = {'%l+', '[%({%)}]', '[%[%]]', '%-%-'},
-  [l.KEYWORD] = {
-    ['if'] = 1, ['do'] = 1, ['function'] = 1, ['end'] = -1, ['repeat'] = 1,
-    ['until'] = -1
-  },
-  [l.COMMENT] = {
-    ['['] = fold_longcomment, [']'] = fold_longcomment,
-    ['--'] = l.fold_line_comments('--')
-  },
-  longstring = {['['] = 1, [']'] = -1},
-  [l.OPERATOR] = {['('] = 1, ['{'] = 1, [')'] = -1, ['}'] = -1}
-}
-
-return M
+lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'function', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until')
+lex:add_fold_point(lexer.COMMENT, '[', fold_longcomment)
+lex:add_fold_point(lexer.COMMENT, ']', fold_longcomment)
+lex:add_fold_point(lexer.COMMENT, lexer.fold_consecutive_lines('--'))
+lex:add_fold_point('longstring', '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+return lex
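
Note: the conversion above follows the same recipe as the other lexers named in the commit
message (dsv, git-rebase, strace): the legacy M._rules / M._tokenstyles / M._foldsymbols
tables are replaced by lexer.new(), lex:add_rule(), lex:add_style() and lex:add_fold_point()
calls, and deprecated helpers such as lexer.delimited_range() give way to lexer.range().
Below is a minimal sketch of that new format for orientation only; the 'demo' language name,
its keyword list, and its comment/operator choices are invented for illustration, and it uses
only calls that already appear in the diff above.

-- Minimal new-format Scintillua lexer sketch (illustrative; 'demo' is a made-up name).
local lexer = require('lexer')
local token, word_match = lexer.token, lexer.word_match
local S = lpeg.S

local lex = lexer.new('demo')

-- Rules are registered one by one instead of being collected in an M._rules table.
lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
lex:add_rule('keyword', token(lexer.KEYWORD, word_match{'if', 'then', 'end'}))

-- lexer.range() replaces the deprecated lexer.delimited_range().
lex:add_rule('string', token(lexer.STRING, lexer.range('"')))

-- lexer.to_eol() replaces the old l.nonnewline^0 idiom for line comments.
lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#')))

lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/=<>')))

-- Fold points replace the old M._foldsymbols table.
lex:add_fold_point(lexer.KEYWORD, 'if', 'end')

return lex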