| author | qiu-x <alex@alexslomka.xyz> | 2022-06-29 07:56:51 +0200 |
|---|---|---|
| committer | Felix Van der Jeugt <felix.vanderjeugt@posteo.net> | 2022-11-29 21:57:18 +0100 |
| commit | 8a420ecc4c1ed50111464ec66901bd983eaf2dbd (patch) | |
| tree | f31d2186cafaee6e7f18d32fe99144c3e8148c00 /lua/lexers/rest.lua | |
| parent | 981b90a203484182feace48471fe2b53dae7676f (diff) | |
Resync the lexers with Scintillua
- Resync the lexers with Scintillua
- Update the lexer readme
- Update `zenburn` theme to fix some highlighting issues
- lexers: redirect the print function to vis:info() (sketched after this list)
- Fix support for custom style names
- Replace deprecated `lexer.delimited_range()` with `lexer.range()`, as per the error message "lexer.delimited_range() is deprecated, use lexer.range()" (sketched after this list)
- Remove the remaining `lexer.delimited_range()` call
- Set syntax to `nil` if the file type has no matching lexer
- Update the Go lexer for Go 1.18
- lexers/dsv: convert to new lexer format
(cherry picked from commit 9edbc3cd9ea1d7142b1305840432a3d2739e755a)
- lexers/gemini: disable legacy gemini lexer
This reverts commit 468f9ee1b027a7ce98b1a249fa1af5888feeb989.
The lexer is in the legacy format and of questionable quality; ideally it
should be contributed upstream, from where it will eventually
trickle down to us.
- lexers/git-rebase: convert to new lexer format
(cherry picked from commit 4000a4cc9ac4a4c2869dfae772b977a82aee8d8c)
- lexers/strace: convert to new lexer format
(cherry picked from commit e420451320d97eb164f5629c1bcfab0b595be29d)
- lexers/typescript: add new upstream lexer revision 28e2b60
(cherry picked from commit 7326e6deecdaa75fa94ae9ebdb653f9f907b33f2)
- Use `package.searchpath` instead of a local `searchpath` function (sketched after this list)
- Restore `filetype: support filetype detection via hashbang`
- Remove redundant comment
- Restore gemini lexer
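
The `lexer.delimited_range()` to `lexer.range()` migration referenced above amounts to swapping the deprecated call for its replacement while keeping the delimiter arguments. A minimal Lua sketch, based only on the calls visible in the rest.lua diff further down; the local variable names are illustrative, not taken from the lexer:

```lua
local l = require('lexer')

-- Deprecated form flagged by the error message:
--   l.delimited_range('`')        -- `...`
--   l.delimited_range('|', true)  -- |...|, confined to one line
-- Replacement form used throughout the resynced lexer:
local interpreted    = l.range('`')          -- `...`
local substitution   = l.range('|', true)    -- |...|, single line
local inline_literal = l.range('``', '``')   -- explicit start and end delimiters
```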
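
The "redirect print function to vis:info()" item can be pictured as a small override installed in the lexer environment. This is a hedged sketch of the idea, assuming only the documented `vis:info(message)` call; it is not the actual vis implementation:

```lua
-- Sketch: route anything a lexer prints to the editor's info line
-- instead of stdout, which is not visible inside vis.
print = function(...)
  local parts = {}
  for i = 1, select('#', ...) do
    parts[#parts + 1] = tostring(select(i, ...))
  end
  vis:info(table.concat(parts, ' '))
end
```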
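
Likewise, the `package.searchpath` item refers to the stock Lua 5.2+ function for resolving a module name against a path template. A sketch of how such a lookup works; the module name and path template are made up for illustration and do not come from the vis source:

```lua
-- package.searchpath(name, path) returns the first readable file the
-- template resolves to, or nil plus a message listing every candidate tried.
local name = 'lexers.rest'                   -- hypothetical module name
local path = './lua/?.lua;./lua/?/init.lua'  -- hypothetical search template
local file, err = package.searchpath(name, path)
if file then
  dofile(file)
else
  error(err)
end
```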
Diffstat (limited to 'lua/lexers/rest.lua')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | lua/lexers/rest.lua | 101 |
1 file changed, 47 insertions, 54 deletions
```diff
diff --git a/lua/lexers/rest.lua b/lua/lexers/rest.lua
index be5b839..e7bf467 100644
--- a/lua/lexers/rest.lua
+++ b/lua/lexers/rest.lua
@@ -1,9 +1,9 @@
--- Copyright 2006-2017 Mitchell mitchell.att.foicica.com. See LICENSE.
+-- Copyright 2006-2022 Mitchell. See LICENSE.
 -- reStructuredText LPeg lexer.
 
 local l = require('lexer')
 local token, word_match, starts_line = l.token, l.word_match, l.starts_line
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
+local P, S = lpeg.P, lpeg.S
 
 local M = {_NAME = 'rest'}
 
@@ -15,11 +15,11 @@ local any_indent = S(' \t')^0
 local adornment_chars = lpeg.C(S('!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~'))
 local adornment = lpeg.C(adornment_chars^2 * any_indent) * (l.newline + -1)
 local overline = lpeg.Cmt(starts_line(adornment), function(input, index, adm, c)
-  if not adm:find('^%'..c..'+%s*$') then return nil end
+  if not adm:find('^%' .. c .. '+%s*$') then return nil end
   local rest = input:sub(index)
   local lines = 1
   for line, e in rest:gmatch('([^\r\n]+)()') do
-    if lines > 1 and line:match('^(%'..c..'+)%s*$') == adm then
+    if lines > 1 and line:match('^(%' .. c .. '+)%s*$') == adm then
       return index + e - 1
     end
     if lines > 3 or #line > #adm then return nil end
@@ -28,7 +28,7 @@ local overline = lpeg.Cmt(starts_line(adornment), function(input, index, adm, c)
   return #input + 1
 end)
 local underline = lpeg.Cmt(starts_line(adornment), function(_, index, adm, c)
-  local pos = adm:match('^%'..c..'+%s*()$')
+  local pos = adm:match('^%' .. c .. '+%s*()$')
   return pos and index - #adm + pos - 1 or nil
 end)
 -- Token needs to be a predefined one in order for folder to work.
@@ -37,16 +37,15 @@ local title = token(l.CONSTANT, overline + underline)
 -- Lists.
 local bullet_list = S('*+-') -- TODO: '•‣⁃', as lpeg does not support UTF-8
 local enum_list = P('(')^-1 *
-                  (l.digit^1 + S('ivxlcmIVXLCM')^1 + l.alnum + '#') * S('.)')
+  (l.digit^1 + S('ivxlcmIVXLCM')^1 + l.alnum + '#') * S('.)')
 local field_list = ':' * (l.any - ':')^1 * P(':')^-1
 local option_word = l.alnum * (l.alnum + '-')^0
 local option = S('-/') * option_word * (' ' * option_word)^-1 +
-               '--' * option_word * ('=' * option_word)^-1
+  '--' * option_word * ('=' * option_word)^-1
 local option_list = option * (',' * l.space^1 * option)^-1
 local list = #(l.space^0 * (S('*+-:/') + enum_list)) *
-             starts_line(token('list', l.space^0 * (option_list + bullet_list +
-                                                    enum_list + field_list) *
-                               l.space))
+  starts_line(token('list', l.space^0 *
+    (option_list + bullet_list + enum_list + field_list) * l.space))
 
 -- Literal block.
 local block = P('::') * (l.newline + -1) * function(input, index)
@@ -55,7 +54,7 @@ local block = P('::') * (l.newline + -1) * function(input, index)
   for pos, indent, line in rest:gmatch('()[ \t]*()([^\r\n]+)') do
     local no_indent = (indent - pos < level and line ~= ' ' or level == 0)
     local quoted = no_indent and line:find(quote or '^%s*%W')
-    if quoted and not quote then quote = '^%s*%'..line:match('^%s*(%W)') end
+    if quoted and not quote then quote = '^%s*%' .. line:match('^%s*(%W)') end
     if no_indent and not quoted and pos > 1 then return index + pos - 1 end
   end
   return #input + 1
@@ -74,8 +73,7 @@ local footnote = token('footnote_block', prefix * footnote_label * l.space)
 local citation_label = '[' * word * ']'
 local citation = token('citation_block', prefix * citation_label * l.space)
 local link = token('link_block', prefix * '_' *
-                   (l.delimited_range('`') + (P('\\') * 1 +
-                    l.nonnewline - ':')^1) * ':' * l.space)
+  (l.range('`') + (P('\\') * 1 + l.nonnewline - ':')^1) * ':' * l.space)
 local markup_block = #prefix * starts_line(footnote + citation + link)
 
 -- Directives.
@@ -102,8 +100,8 @@ local directive_type = word_match({
   'include', 'raw', 'class', 'role', 'default-role', 'title',
   'restructuredtext-test-directive',
 }, '-')
-local known_directive = token('directive',
-                              prefix * directive_type * '::' * l.space)
+local known_directive = token('directive', prefix * directive_type * '::' *
+  l.space)
 local sphinx_directive_type = word_match({
   -- The TOC tree.
   'toctree',
@@ -115,12 +113,12 @@ local sphinx_directive_type = word_match({
   -- Miscellaneous
   'sectionauthor', 'index', 'only', 'tabularcolumns'
 }, '-')
-local sphinx_directive = token('sphinx_directive',
-                               prefix * sphinx_directive_type * '::' * l.space)
-local unknown_directive = token('unknown_directive',
-                                prefix * word * '::' * l.space)
+local sphinx_directive = token('sphinx_directive', prefix *
+  sphinx_directive_type * '::' * l.space)
+local unknown_directive = token('unknown_directive', prefix * word * '::' *
+  l.space)
 local directive = #prefix * starts_line(known_directive + sphinx_directive +
-                                        unknown_directive)
+  unknown_directive)
 
 -- Sphinx code block.
 local indented_block = function(input, index)
@@ -134,42 +132,37 @@ local indented_block = function(input, index)
   return #input + 1
 end
 local code_block = prefix * 'code-block::' * S(' \t')^1 * l.nonnewline^0 *
-                   (l.newline + -1) * indented_block
+  (l.newline + -1) * indented_block
 local sphinx_block = #prefix * token('code_block', starts_line(code_block))
 
 -- Substitution definitions.
-local substitution = #prefix *
-                     token('substitution',
-                           starts_line(prefix * l.delimited_range('|') *
-                                       l.space^1 * word * '::' * l.space))
+local substitution = #prefix * token('substitution',
+  starts_line(prefix * l.range('|') * l.space^1 * word * '::' * l.space))
 
 -- Comments.
-local line_comment = prefix * l.nonnewline^0
+local line_comment = l.to_eol(prefix)
 local bprefix = any_indent * '..'
 local block_comment = bprefix * l.newline * indented_block
-local comment = #bprefix *
-                token(l.COMMENT, starts_line(line_comment + block_comment))
+local comment = #bprefix * token(l.COMMENT, starts_line(line_comment +
+  block_comment))
 
 -- Inline markup.
-local em = token('em', l.delimited_range('*'))
-local strong = token('strong', ('**' * (l.any - '**')^0 * P('**')^-1))
+local em = token('em', l.range('*'))
+local strong = token('strong', l.range('**', '**'))
 local role = token('role', ':' * word * ':' * (word * ':')^-1)
-local interpreted = role^-1 * token('interpreted', l.delimited_range('`')) *
-                    role^-1
-local inline_literal = token('inline_literal',
-                             '``' * (l.any - '``')^0 * P('``')^-1)
-local link_ref = token('link',
-                       (word + l.delimited_range('`')) * '_' * P('_')^-1 +
-                       '_' * l.delimited_range('`'))
+local interpreted = role^-1 * token('interpreted', l.range('`')) * role^-1
+local inline_literal = token('inline_literal', l.range('``', '``'))
+local postfix_link = (word + l.range('`')) * '_' * P('_')^-1
+local prefix_link = '_' * l.range('`')
+local link_ref = token('link', postfix_link + prefix_link)
 local footnote_ref = token('footnote', footnote_label * '_')
 local citation_ref = token('citation', citation_label * '_')
-local substitution_ref = token('substitution', l.delimited_range('|', true) *
-                               ('_' * P('_')^-1)^-1)
+local substitution_ref = token('substitution', l.range('|', true) *
+  ('_' * P('_')^-1)^-1)
 local link = token('link', l.alpha * (l.alnum + S('-.'))^1 * ':' *
-                   (l.alnum + S('/.+-%@'))^1)
+  (l.alnum + S('/.+-%@'))^1)
 local inline_markup = (strong + em + inline_literal + link_ref + interpreted +
-                       footnote_ref + citation_ref + substitution_ref + link) *
-                      -l.alnum
+  footnote_ref + citation_ref + substitution_ref + link) * -l.alnum
 
 -- Other.
 local non_space = token(l.DEFAULT, l.alnum * (l.any - l.space)^0)
@@ -193,14 +186,14 @@ M._rules = {
 
 M._tokenstyles = {
   list = l.STYLE_TYPE,
-  literal_block = l.STYLE_EMBEDDED..',eolfilled',
+  literal_block = l.STYLE_EMBEDDED .. ',eolfilled',
   footnote_block = l.STYLE_LABEL,
   citation_block = l.STYLE_LABEL,
   link_block = l.STYLE_LABEL,
   directive = l.STYLE_KEYWORD,
-  sphinx_directive = l.STYLE_KEYWORD..',bold',
-  unknown_directive = l.STYLE_KEYWORD..',italics',
-  code_block = l.STYLE_EMBEDDED..',eolfilled',
+  sphinx_directive = l.STYLE_KEYWORD .. ',bold',
+  unknown_directive = l.STYLE_KEYWORD .. ',italics',
+  code_block = l.STYLE_EMBEDDED .. ',eolfilled',
   substitution = l.STYLE_VARIABLE,
   strong = 'bold',
   em = 'italics',
@@ -219,7 +212,7 @@ local sphinx_levels = {
 -- Section-based folding.
 M._fold = function(text, start_pos, start_line, start_level)
   local folds, line_starts = {}, {}
-  for pos in (text..'\n'):gmatch('().-\r?\n') do
+  for pos in (text .. '\n'):gmatch('().-\r?\n') do
     line_starts[#line_starts + 1] = pos
   end
   local style_at, CONSTANT, level = l.style_at, l.CONSTANT, start_level
@@ -231,7 +224,7 @@ M._fold = function(text, start_pos, start_line, start_level)
     local c = text:sub(pos, pos)
     local line_num = start_line + i - 1
     folds[line_num] = level
-    if style_at[start_pos + pos] == CONSTANT and c:find('^[^%w%s]') then
+    if style_at[start_pos + pos - 1] == CONSTANT and c:find('^[^%w%s]') then
      local sphinx_level = FOLD_BASE + (sphinx_levels[c] or #sphinx_levels)
      level = not sphinx and level - 1 or sphinx_level
      if level < FOLD_BASE then level = FOLD_BASE end
@@ -249,11 +242,11 @@ l.property['fold.by.sphinx.convention'] = '0'
 --[[ Embedded languages.
 local bash = l.load('bash')
 local bash_indent_level
-local start_rule = #(prefix * 'code-block' * '::' * l.space^1 * 'bash' *
-                     (l.newline + -1)) * sphinx_directive *
-                   token('bash_begin', P(function(input, index)
-  bash_indent_level = #input:match('^([ \t]*)', index)
-  return index
-end))]]
+local start_rule =
+  #(prefix * 'code-block' * '::' * l.space^1 * 'bash' * (l.newline + -1)) *
+  sphinx_directive * token('bash_begin', P(function(input, index)
+    bash_indent_level = #input:match('^([ \t]*)', index)
+    return index
+  end))]]
 
 return M
```
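
One behavioural fix in the fold hunk above is the change from `style_at[start_pos + pos]` to `style_at[start_pos + pos - 1]`, which appears to compensate for combining two 1-based offsets. A standalone illustration of the arithmetic (the numbers are arbitrary):

```lua
-- start_pos: 1-based position of the folded chunk within the buffer
-- pos: 1-based position of a line start within that chunk
local start_pos, pos = 11, 1
-- the chunk's first line begins at buffer position 11, not 12
assert(start_pos + pos - 1 == 11)
print('index arithmetic checks out')
```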
