diff options
| author | qiu-x <alex@alexslomka.xyz> | 2022-06-29 07:56:51 +0200 |
|---|---|---|
| committer | Felix Van der Jeugt <felix.vanderjeugt@posteo.net> | 2022-11-29 21:57:18 +0100 |
| commit | 8a420ecc4c1ed50111464ec66901bd983eaf2dbd (patch) | |
| tree | f31d2186cafaee6e7f18d32fe99144c3e8148c00 /lua/lexers/vcard.lua | |
| parent | 981b90a203484182feace48471fe2b53dae7676f (diff) | |
| download | vis-8a420ecc4c1ed50111464ec66901bd983eaf2dbd.tar.gz vis-8a420ecc4c1ed50111464ec66901bd983eaf2dbd.tar.xz | |
Resync the lexers with Scintillua
- Resync the lexers with Scintillua
- Update the lexer readme
- Update `zenburn` theme to fix some highlighting issues
- lexers: redirect print function to vis:info()
- Fix support for custom style names
- As per error message "lexer.delimited_range() is deprecated, use lexer.range()".
- Remove remaining `lexer.delimited_range()` call
- Set syntax to `nil` if the file type has no matching lexer
- Updated Go lexer for Go 1.18.
- lexers/dsv: convert to new lexer format
(cherry picked from commit 9edbc3cd9ea1d7142b1305840432a3d2739e755a)
- lexers/gemini: disable legacy gemini lexer
This reverts commit 468f9ee1b027a7ce98b1a249fa1af5888feeb989.
It is in legacy format and of questionable quality. Ideally it
should be contributed upstream from where it will eventually
trickle down to us.
- lexers/git-rebase: convert to new lexer format
(cherry picked from commit 4000a4cc9ac4a4c2869dfae772b977a82aee8d8c)
- lexers/strace: convert to new lexer format
(cherry picked from commit e420451320d97eb164f5629c1bcfab0b595be29d)
- lexers/typescript: add new upstream lexer revision 28e2b60
(cherry picked from commit 7326e6deecdaa75fa94ae9ebdb653f9f907b33f2)
- use `package.searchpath` instead of a local `searchpath` function
- Restore `filetype: support filetype detection via hashbang`
- Remove redundant comment
- Restore gemini lexer
Diffstat (limited to 'lua/lexers/vcard.lua')
| -rw-r--r-- | lua/lexers/vcard.lua | 128 |
1 file changed, 51 insertions, 77 deletions
diff --git a/lua/lexers/vcard.lua b/lua/lexers/vcard.lua index f19473e..2ee82ba 100644 --- a/lua/lexers/vcard.lua +++ b/lua/lexers/vcard.lua @@ -1,97 +1,71 @@ --- Copyright (c) 2015-2017 Piotr Orzechowski [drzewo.org]. See LICENSE. +-- Copyright (c) 2015-2022 Piotr Orzechowski [drzewo.org]. See LICENSE. -- vCard 2.1, 3.0 and 4.0 LPeg lexer. -local l = require('lexer') -local token, word_match = l.token, l.word_match -local P, R, S = lpeg.P, lpeg.R, lpeg.S +local lexer = require('lexer') +local token, word_match = lexer.token, lexer.word_match +local P, S = lpeg.P, lpeg.S -local M = {_NAME = 'vcard'} +local lex = lexer.new('vcard') -- Whitespace. -local ws = token(l.WHITESPACE, l.space^1) +lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) + +-- Begin vCard, end vCard. +lex:add_rule('begin_sequence', token(lexer.KEYWORD, 'BEGIN') * token(lexer.OPERATOR, ':') * + token(lexer.COMMENT, 'VCARD')) +lex:add_rule('end_sequence', token(lexer.KEYWORD, 'END') * token(lexer.OPERATOR, ':') * + token(lexer.COMMENT, 'VCARD')) + +-- vCard version (in v3.0 and v4.0 must appear immediately after BEGIN:VCARD). +lex:add_rule('version_sequence', token(lexer.KEYWORD, 'VERSION') * token(lexer.OPERATOR, ':') * + token(lexer.CONSTANT, lexer.digit^1 * ('.' * lexer.digit^1)^-1)) -- Required properties. -local required_property = token(l.KEYWORD, word_match({ - 'BEGIN', 'END', 'FN', 'N' --[[ Not required in v4.0. ]], 'VERSION' -}, nil, true)) * #P(':') +local required_property = token(lexer.KEYWORD, word_match({ + 'BEGIN', 'END', 'FN', 'VERSION', -- + 'N' -- Not required in v4.0. +}, true)) * #P(':') +lex:add_rule('required_property', required_property) -- Supported properties. -local supported_property = token(l.TYPE, word_match({ - 'ADR', 'AGENT' --[[ Not supported in v4.0. ]], - 'ANNIVERSARY' --[[ Supported in v4.0 only. ]], 'BDAY', - 'CALADRURI' --[[ Supported in v4.0 only. ]], - 'CALURI' --[[ Supported in v4.0 only. ]], 'CATEGORIES', - 'CLASS' --[[ Supported in v3.0 only. 
]], - 'CLIENTPIDMAP' --[[ Supported in v4.0 only. ]], 'EMAIL', 'END', - 'FBURL' --[[ Supported in v4.0 only. ]], - 'GENDER' --[[ Supported in v4.0 only. ]], 'GEO', - 'IMPP' --[[ Not supported in v2.1. ]], 'KEY', - 'KIND' --[[ Supported in v4.0 only. ]], - 'LABEL' --[[ Not supported in v4.0. ]], - 'LANG' --[[ Supported in v4.0 only. ]], 'LOGO', - 'MAILER' --[[ Not supported in v4.0. ]], - 'MEMBER' --[[ Supported in v4.0 only. ]], - 'NAME' --[[ Supported in v3.0 only. ]], - 'NICKNAME' --[[ Not supported in v2.1. ]], 'NOTE', 'ORG', 'PHOTO', - 'PRODID' --[[ Not supported in v2.1. ]], - 'PROFILE' --[[ Not supported in v4.0. ]], - 'RELATED' --[[ Supported in v4.0 only. ]], 'REV', 'ROLE', - 'SORT-STRING' --[[ Not supported in v4.0. ]], 'SOUND', 'SOURCE', 'TEL', - 'TITLE', 'TZ', 'UID', 'URL', 'XML' --[[ Supported in v4.0 only. ]] -}, nil, true)) * #S(':;') +local supported_property = token(lexer.TYPE, word_match({ + 'ADR', 'BDAY', 'CATEGORIES', 'EMAIL', 'END', 'GEO', 'KEY', 'LOGO', 'NOTE', 'ORG', 'PHOTO', 'REV', + 'ROLE', 'SOUND', 'SOURCE', 'TEL', 'TITLE', 'TZ', 'UID', 'URL', + -- Supported in v4.0 only. + 'ANNIVERSARY', 'CALADRURI', 'CALURI', 'CLIENTPIDMAP', 'FBURL', 'GENDER', 'KIND', 'LANG', 'MEMBER', + 'RELATED', 'XML', + -- Not supported in v4.0. + 'AGENT', 'LABEL', 'MAILER', 'PROFILE', 'SORT-STRING', + -- Supported in v3.0 only. + 'CLASS', 'NAME', + -- Not supported in v2.1. + 'IMPP', 'NICKNAME', 'PRODID' +}, true)) * #S(':;') +lex:add_rule('supported_property', supported_property) -local identifier = l.alpha^1 * l.digit^0 * (P('-') * l.alnum^1)^0 +-- Group and property. +local identifier = lexer.alpha^1 * lexer.digit^0 * ('-' * lexer.alnum^1)^0 +local property = required_property + supported_property + + lexer.token(lexer.TYPE, S('xX') * '-' * identifier) * #S(':;') +lex:add_rule('group_sequence', token(lexer.CONSTANT, lexer.starts_line(identifier)) * + token(lexer.OPERATOR, '.') * property) -- Extension. 
-local extension = token(l.TYPE, - l.starts_line(S('xX') * P('-') * identifier * #S(':;'))) +lex:add_rule('extension', + token(lexer.TYPE, lexer.starts_line(S('xX') * '-' * identifier * #S(':;')))) -- Parameter. -local parameter = token(l.IDENTIFIER, l.starts_line(identifier * #S(':='))) + - token(l.STRING, identifier) * #S(':=') +local parameter = (token(lexer.IDENTIFIER, lexer.starts_line(identifier)) + + token(lexer.STRING, identifier)) * #S(':=') +lex:add_rule('parameter', parameter) -- Operators. -local operator = token(l.OPERATOR, S('.:;=')) - --- Group and property. -local group_sequence = token(l.CONSTANT, l.starts_line(identifier)) * - token(l.OPERATOR, P('.')) * - (required_property + supported_property + - l.token(l.TYPE, S('xX') * P('-') * identifier) * - #S(':;')) --- Begin vCard, end vCard. -local begin_sequence = token(l.KEYWORD, P('BEGIN')) * - token(l.OPERATOR, P(':')) * token(l.COMMENT, P('VCARD')) -local end_sequence = token(l.KEYWORD, P('END')) * token(l.OPERATOR, P(':')) * - token(l.COMMENT, P('VCARD')) - --- vCard version (in v3.0 and v4.0 must appear immediately after BEGIN:VCARD). -local version_sequence = token(l.KEYWORD, P('VERSION')) * - token(l.OPERATOR, P(':')) * - token(l.CONSTANT, l.digit^1 * (P('.') * l.digit^1)^-1) +lex:add_rule('operator', token(lexer.OPERATOR, S('.:;='))) -- Data. -local data = token(l.IDENTIFIER, l.any) - --- Rules. -M._rules = { - {'whitespace', ws}, - {'begin_sequence', begin_sequence}, - {'end_sequence', end_sequence}, - {'version_sequence', version_sequence}, - {'group_sequence', group_sequence}, - {'required_property', required_property}, - {'supported_property', supported_property}, - {'extension', extension}, - {'parameter', parameter}, - {'operator', operator}, - {'data', data}, -} +lex:add_rule('data', token(lexer.IDENTIFIER, lexer.any)) --- Folding. -M._foldsymbols = { - _patterns = {'BEGIN', 'END'}, - [l.KEYWORD] = {['BEGIN'] = 1, ['END'] = -1} -} +-- Fold points. 
+lex:add_fold_point(lexer.KEYWORD, 'BEGIN', 'END') -return M +return lex |
