-rw-r--r--  lua/lexers/ada.lua      |   2
-rw-r--r--  lua/lexers/csharp.lua   |   2
-rw-r--r--  lua/lexers/d.lua        |   2
-rw-r--r--  lua/lexers/dart.lua     |  68
-rw-r--r--  lua/lexers/fsharp.lua   |   2
-rw-r--r--  lua/lexers/groovy.lua   |   2
-rw-r--r--  lua/lexers/haskell.lua  |   2
-rw-r--r--  lua/lexers/html.lua     |   2
-rw-r--r--  lua/lexers/ini.lua      |  31
-rw-r--r--  lua/lexers/janet.lua    | 171
-rw-r--r--  lua/lexers/lexer.lua    |   5
-rw-r--r--  lua/lexers/org.lua      |  10
-rw-r--r--  lua/lexers/pascal.lua   |   8
-rw-r--r--  lua/lexers/perl.lua     |   2
-rw-r--r--  lua/lexers/python.lua   |   3
-rw-r--r--  lua/lexers/ruby.lua     |   2
-rw-r--r--  lua/lexers/rust.lua     |   2
-rw-r--r--  lua/lexers/todotxt.lua  |  54
-rw-r--r--  lua/lexers/wsf.lua      |   2
-rw-r--r--  lua/lexers/xml.lua      |   5
-rw-r--r--  lua/lexers/zig.lua      |   2
21 files changed, 316 insertions, 63 deletions
diff --git a/lua/lexers/ada.lua b/lua/lexers/ada.lua
index 77f8302..cd8f315 100644
--- a/lua/lexers/ada.lua
+++ b/lua/lexers/ada.lua
@@ -42,7 +42,7 @@ lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--')))
 lex:add_rule('number', token(lexer.NUMBER, lexer.number_('_')))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, S(':;=<>&+-*/.()')))
+lex:add_rule('operator', token(lexer.OPERATOR, '..' + S(':;=<>&+-*/.()')))
 
 lexer.property['scintillua.comment'] = '--'
diff --git a/lua/lexers/csharp.lua b/lua/lexers/csharp.lua
index 21bb9e0..954eb7b 100644
--- a/lua/lexers/csharp.lua
+++ b/lua/lexers/csharp.lua
@@ -50,7 +50,7 @@ lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, '#' * S('\t ')^0 *
 	word_match('define elif else endif error if line undef warning region endregion')))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, S('~!.,:;+-*/<>=\\^|&%?()[]{}')))
+lex:add_rule('operator', token(lexer.OPERATOR, '..' + S('~!.,:;+-*/<>=\\^|&%?()[]{}')))
 
 -- Fold points.
 lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif')
diff --git a/lua/lexers/d.lua b/lua/lexers/d.lua
index a471d97..b96e705 100644
--- a/lua/lexers/d.lua
+++ b/lua/lexers/d.lua
@@ -74,7 +74,7 @@ lex:add_rule('annotation', lex:tag(lexer.ANNOTATION, '@' * lexer.word^1))
 lex:add_rule('preprocessor', lex:tag(lexer.PREPROCESSOR, lexer.to_eol('#')))
 
 -- Operators.
-lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('?=!<>+-*$/%&|^~.,;:()[]{}')))
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, '..' + S('?=!<>+-*$/%&|^~.,;:()[]{}')))
 
 -- Fold points.
 lex:add_fold_point(lexer.OPERATOR, '{', '}')
diff --git a/lua/lexers/dart.lua b/lua/lexers/dart.lua
index 2634202..48de025 100644
--- a/lua/lexers/dart.lua
+++ b/lua/lexers/dart.lua
@@ -1,56 +1,76 @@
 -- Copyright 2013-2025 Mitchell. See LICENSE.
 -- Dart LPeg lexer.
 -- Written by Brian Schott (@Hackerpilot on Github).
+-- Migrated by Jamie Drinkell
 
-local lexer = require('lexer')
-local token, word_match = lexer.token, lexer.word_match
+local lexer = lexer
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('dart')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
-	'assert', 'break', 'case', 'catch', 'class', 'const', 'continue', 'default', 'do', 'else', 'enum',
-	'extends', 'false', 'final', 'finally', 'for', 'if', 'in', 'is', 'new', 'null', 'rethrow',
-	'return', 'super', 'switch', 'this', 'throw', 'true', 'try', 'var', 'void', 'while', 'with'
-}))
-
+lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD)))
 -- Built-ins.
-lex:add_rule('builtin', token(lexer.CONSTANT, word_match{
-	'abstract', 'as', 'dynamic', 'export', 'external', 'factory', 'get', 'implements', 'import',
-	'library', 'operator', 'part', 'set', 'static', 'typedef'
-}))
+lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN)))
+-- Types.
+lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE)))
+-- Directives
+lex:add_rule('directive', lex:tag(lexer.PREPROCESSOR, lex:word_match(lexer.PREPROCESSOR)))
 
 -- Strings.
 local sq_str = S('r')^-1 * lexer.range("'", true)
 local dq_str = S('r')^-1 * lexer.range('"', true)
 local tq_str = S('r')^-1 * (lexer.range("'''") + lexer.range('"""'))
-lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str))
+lex:add_rule('string', lex:tag(lexer.STRING, tq_str + sq_str + dq_str))
+
+-- Functions.
+lex:add_rule('function', lex:tag(lexer.FUNCTION, lexer.word) * #(lexer.space^0 * '('))
 
 -- Identifiers.
-lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word))
 
 -- Comments.
 local line_comment = lexer.to_eol('//', true)
-local block_comment = lexer.range('/*', '*/', false, false, true)
-lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+local block_comment = lexer.range('/*', '*/')
+lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment))
 
 -- Numbers.
-lex:add_rule('number', token(lexer.NUMBER, lexer.number))
+lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, S('#?=!<>+-*$/%&|^~.,;()[]{}')))
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('#?=!<>+-*$/%&|^~.,;()[]{}')))
 
 -- Annotations.
-lex:add_rule('annotation', token(lexer.ANNOTATION, '@' * lexer.word^1))
+lex:add_rule('annotation', lex:tag(lexer.ANNOTATION, '@' * lexer.word^1))
 
--- Fold points.
+-- Fold points (add for most bracket pairs due to Flutter's usual formatting).
 lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
 lex:add_fold_point(lexer.COMMENT, '/*', '*/')
 
+lex:set_word_list(lexer.KEYWORD, {
+	'abstract', 'as', 'assert', 'async', 'await', 'break', 'case', 'catch', 'class', 'continue',
+	'covariant', 'default', 'do', 'else', 'enum', 'extends', 'factory', 'finally', 'for', 'get', 'if',
+	'implements', 'in', 'interface', 'is', 'mixin', 'on', 'operator', 'rethrow', 'return', 'set',
+	'super', 'switch', 'sync', 'this', 'throw', 'try', 'with', 'while', 'yield', --
+	'base', 'extension', 'external', 'late', 'of', 'required', 'sealed', 'when'
+})
+
+lex:set_word_list(lexer.PREPROCESSOR, {
+	'deferred', 'export', 'hide', 'import', 'library', 'part'
+})
+
+lex:set_word_list(lexer.CONSTANT_BUILTIN, {
+	'false', 'true', 'null'
+})
+
+lex:set_word_list(lexer.TYPE, {
+	'const', 'dynamic', 'final', 'Function', 'new', 'static', 'typedef', 'var', 'void', 'int',
+	'double', 'String', 'bool', 'List', 'Set', 'Map', 'Future', 'Stream', 'Iterable', 'Object',
+	'Null', 'type'
+})
+
 lexer.property['scintillua.comment'] = '//'
 
 return lex
diff --git a/lua/lexers/fsharp.lua b/lua/lexers/fsharp.lua
index 7dd842a..99e1cab 100644
--- a/lua/lexers/fsharp.lua
+++ b/lua/lexers/fsharp.lua
@@ -52,7 +52,7 @@ lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') * S('\t
 	word_match('else endif endregion if ifdef ifndef light region')))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/^.,:;~!@#%^&|?[](){}')))
+lex:add_rule('operator', token(lexer.OPERATOR, '..' + S('=<>+-*/^.,:;~!@#%^&|?[](){}')))
 
 lexer.property['scintillua.comment'] = '//'
diff --git a/lua/lexers/groovy.lua b/lua/lexers/groovy.lua
index ddd7c78..234e526 100644
--- a/lua/lexers/groovy.lua
+++ b/lua/lexers/groovy.lua
@@ -57,7 +57,7 @@ lex:add_rule('string', string + regex)
 lex:add_rule('number', token(lexer.NUMBER, lexer.number))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, S('=~|!<>+-/*?&.,:;()[]{}')))
+lex:add_rule('operator', token(lexer.OPERATOR, '..' + S('=~|!<>+-/*?&.,:;()[]{}')))
 
 -- Fold points.
 lex:add_fold_point(lexer.OPERATOR, '{', '}')
diff --git a/lua/lexers/haskell.lua b/lua/lexers/haskell.lua
index e4a9bfd..020619a 100644
--- a/lua/lexers/haskell.lua
+++ b/lua/lexers/haskell.lua
@@ -33,7 +33,7 @@ lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment))
 lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number))
 
 -- Operators.
-lex:add_rule('operator', lex:tag(lexer.OPERATOR, op))
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, '..' + op))
 
 lexer.property['scintillua.comment'] = '--'
diff --git a/lua/lexers/html.lua b/lua/lexers/html.lua
index ed06805..2f3ec6b 100644
--- a/lua/lexers/html.lua
+++ b/lua/lexers/html.lua
@@ -60,6 +60,7 @@ lex:add_rule('entity', lex:tag(lexer.CONSTANT_BUILTIN .. '.entity',
 	'&' * (lexer.any - lexer.space - ';')^1 * ';'))
 
 -- Fold points.
+lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
 local function disambiguate_lt(text, pos, line, s)
 	if line:find('/>', s) then
 		return 0
@@ -70,7 +71,6 @@ local function disambiguate_lt(text, pos, line, s)
 	end
 end
 lex:add_fold_point(lexer.TAG .. '.chars', '<', disambiguate_lt)
-lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
 
 -- Tags that start embedded languages.
 -- Export these patterns for proxy lexers (e.g. ASP) that need them.
diff --git a/lua/lexers/ini.lua b/lua/lexers/ini.lua
index 31fc4bb..0477b8d 100644
--- a/lua/lexers/ini.lua
+++ b/lua/lexers/ini.lua
@@ -1,38 +1,37 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Ini LPeg lexer.
 
-local lexer = require('lexer')
-local token, word_match = lexer.token, lexer.word_match
-local P, S = lpeg.P, lpeg.S
+local lexer = lexer
+local S = lpeg.S
 
-local lex = lexer.new('ini')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match('true false on off yes no')))
+lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD)))
 
 -- Identifiers.
-lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * (lexer.alnum + S('_.'))^0))
+lex:add_rule('identifier',
+	lex:tag(lexer.IDENTIFIER, (lexer.alpha + '_') * (lexer.alnum + S('_. '))^0))
 
 -- Strings.
 local sq_str = lexer.range("'")
 local dq_str = lexer.range('"')
-lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str))
 
--- Labels.
-lex:add_rule('label', token(lexer.LABEL, lexer.range('[', ']', true)))
+-- Section labels/headers.
+lex:add_rule('label', lex:tag(lexer.LABEL, lexer.range('[', ']', true)))
 
 -- Comments.
-lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(lexer.starts_line(S(';#')))))
+lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol(lexer.starts_line(S(';#')))))
 
 -- Numbers.
-local integer = S('+-')^-1 * (lexer.hex_num + lexer.oct_num_('_') + lexer.dec_num_('_'))
-lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer))
+lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.float + lexer.integer))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, '='))
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('=:')))
+
+-- Word lists
+lex:set_word_list(lexer.KEYWORD, 'true false on off yes no')
 
 lexer.property['scintillua.comment'] = '#'
diff --git a/lua/lexers/janet.lua b/lua/lexers/janet.lua
new file mode 100644
index 0000000..ec91f83
--- /dev/null
+++ b/lua/lexers/janet.lua
@@ -0,0 +1,171 @@
+-- Copyright 2025 Jason Lenz code@engenforge.com. See LICENSE.
+-- Janet LPeg lexer.
+
+local lexer = lexer
+local P, S, B = lpeg.P, lpeg.S, lpeg.B
+
+local lex = lexer.new(...)
+
+-- Much of the lexical syntax defined below was derived from the following two
+-- Janet documentation links:
+-- https://janet-lang.org/docs/syntax.html
+-- https://janet-lang.org/api/index.html
+
+-- Note: In some cases Janet documentation uses terminology that differs with
+-- Scintillua typical usage. For example, the Janet documentation defines
+-- keywords as symbols that begin with the character ':' and are treated by the
+-- compiler as constants. In this case they were tagged in Scintillua as
+-- constants. Conversely, keywords in Scintillua were defined as built in names
+-- reserved by the janet compiler such as nil, true, do, fn, etc.
+
+-- Keywords.
+local shorthand = S("',;~|")
+local lead_ch = S('([{') + shorthand + lexer.space
+local trail_ch = S(')]}') + lexer.space
+lex:add_rule('keyword',
+	lex:tag(lexer.KEYWORD, B(lead_ch) * lex:word_match(lexer.KEYWORD) * #trail_ch))
+
+-- Functions.
+lex:add_rule('function', lex:tag(lexer.FUNCTION_BUILTIN,
+	B(lead_ch) * lex:word_match(lexer.FUNCTION_BUILTIN) * #trail_ch))
+
+-- Numbers.
+lex:add_rule('number', lex:tag(lexer.NUMBER,
+	B(lead_ch) * S('-+')^-1 * lexer.digit^1 * (S('._&') + lexer.alnum)^0))
+
+-- Operators.
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, S("()[]{}") + shorthand + P('@') * S('([{')))
+
+-- Constants.
+local id_ch = S('!@$%^&*:-_+=<>.?') + lexer.alnum -- + lpeg.utfR(0x7F, 0x10FFFF)
+lex:add_rule('constant', lex:tag(lexer.CONSTANT, ':' * id_ch^0))
+
+-- Strings and buffers.
+local dq_str = lexer.range('"', false, true)
+local bt_str = lpeg.Cmt(P('`')^1, function(input, index, bt)
+	local _, e = input:find(bt, index)
+	return (e or #input) + 1
+end)
+lex:add_rule('string', lex:tag(lexer.STRING, P('@')^-1 * (dq_str + bt_str)))
+
+-- Identifiers.
+lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, id_ch^1))
+
+-- Comments.
+lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+-- Word lists.
+lex:set_word_list(lexer.KEYWORD, {
+	'break', 'def', 'do', 'false', 'fn', 'if', 'nil', 'quasiquote', 'quote', 'set', 'slice', 'true',
+	'unquote', 'var', 'while'
+})
+
+lex:set_word_list(lexer.FUNCTION_BUILTIN, {
+	'%', '%=', '*', '*=', '*args*', '*current-file*', '*debug*', '*defdyn-prefix*', '*doc-color*',
+	'*doc-width*', '*err*', '*err-color*', '*executable*', '*exit*', '*exit-value*', '*ffi-context*',
+	'*lint-error*', '*lint-levels*', '*lint-warn*', '*macro-form*', '*macro-lints*', '*module-cache*',
+	'*module-loaders*', '*module-loading*', '*module-make-env*', '*module-paths*', '*out*',
+	'*peg-grammar*', '*pretty-format*', '*profilepath*', '*redef*', '*repl-prompt*', '*syspath*',
+	'*task-id*', '+', '++', '+=', '-', '--', '-=', '->', '->>', '-?>', '-?>>', '/', '/=', '<', '<=',
+	'=', '>', '>=', 'abstract?', 'accumulate', 'accumulate2', 'all', 'all-bindings', 'all-dynamics',
+	'and', 'any?', 'apply', 'array', 'array/clear', 'array/concat', 'array/ensure', 'array/fill',
+	'array/insert', 'array/join', 'array/new', 'array/new-filled', 'array/peek', 'array/pop',
+	'array/push', 'array/remove', 'array/slice', 'array/trim', 'array/weak', 'array?', 'as->',
+	'as-macro', 'as?->', 'asm', 'assert', 'assertf', 'bad-compile', 'bad-parse', 'band', 'blshift',
+	'bnot', 'boolean?', 'bor', 'brshift', 'brushift', 'buffer', 'buffer/bit', 'buffer/bit-clear',
+	'buffer/bit-set', 'buffer/bit-toggle', 'buffer/blit', 'buffer/clear', 'buffer/fill',
+	'buffer/format', 'buffer/format-at', 'buffer/from-bytes', 'buffer/new', 'buffer/new-filled',
+	'buffer/popn', 'buffer/push', 'buffer/push-at', 'buffer/push-byte', 'buffer/push-float32',
+	'buffer/push-float64', 'buffer/push-string', 'buffer/push-uint16', 'buffer/push-uint32',
+	'buffer/push-uint64', 'buffer/push-word', 'buffer/slice', 'buffer/trim', 'buffer?', 'bundle/add',
+	'bundle/add-bin', 'bundle/add-directory', 'bundle/add-file', 'bundle/install',
+	'bundle/installed?', 'bundle/list', 'bundle/manifest', 'bundle/prune', 'bundle/reinstall',
+	'bundle/replace', 'bundle/topolist', 'bundle/uninstall', 'bundle/update-all', 'bundle/whois',
+	'bxor', 'bytes?', 'cancel', 'case', 'catseq', 'cfunction?', 'chr', 'cli-main', 'cmp', 'comment',
+	'comp', 'compare', 'compare<', 'compare<=', 'compare=', 'compare>', 'compare>=', 'compif',
+	'compile', 'complement', 'comptime', 'compwhen', 'cond', 'coro', 'count', 'curenv', 'debug',
+	'debug/arg-stack', 'debug/break', 'debug/fbreak', 'debug/lineage', 'debug/stack',
+	'debug/stacktrace', 'debug/step', 'debug/unbreak', 'debug/unfbreak', 'debugger', 'debugger-env',
+	'debugger-on-status', 'dec', 'deep-not=', 'deep=', 'def-', 'default', 'default-peg-grammar',
+	'defdyn', 'defer', 'defglobal', 'defmacro', 'defmacro-', 'defn', 'defn-', 'delay', 'describe',
+	'dictionary?', 'disasm', 'distinct', 'div', 'doc', 'doc*', 'doc-format', 'doc-of', 'dofile',
+	'drop', 'drop-until', 'drop-while', 'dyn', 'each', 'eachk', 'eachp', 'edefer', 'eflush', 'empty?',
+	'env-lookup', 'eprin', 'eprinf', 'eprint', 'eprintf', 'error', 'errorf', 'ev/acquire-lock',
+	'ev/acquire-rlock', 'ev/acquire-wlock', 'ev/all-tasks', 'ev/call', 'ev/cancel', 'ev/capacity',
+	'ev/chan', 'ev/chan-close', 'ev/chunk', 'ev/close', 'ev/count', 'ev/deadline', 'ev/do-thread',
+	'ev/full', 'ev/gather', 'ev/give', 'ev/give-supervisor', 'ev/go', 'ev/lock', 'ev/read',
+	'ev/release-lock', 'ev/release-rlock', 'ev/release-wlock', 'ev/rselect', 'ev/rwlock', 'ev/select',
+	'ev/sleep', 'ev/spawn', 'ev/spawn-thread', 'ev/take', 'ev/thread', 'ev/thread-chan', 'ev/to-file',
+	'ev/with-deadline', 'ev/with-lock', 'ev/with-rlock', 'ev/with-wlock', 'ev/write', 'eval',
+	'eval-string', 'even?', 'every?', 'extreme', 'false?', 'ffi/align', 'ffi/call',
+	'ffi/calling-conventions', 'ffi/close', 'ffi/context', 'ffi/defbind', 'ffi/defbind-alias',
+	'ffi/free', 'ffi/jitfn', 'ffi/lookup', 'ffi/malloc', 'ffi/native', 'ffi/pointer-buffer',
+	'ffi/pointer-cfunction', 'ffi/read', 'ffi/signature', 'ffi/size', 'ffi/struct', 'ffi/trampoline',
+	'ffi/write', 'fiber-fn', 'fiber/can-resume?', 'fiber/current', 'fiber/getenv', 'fiber/last-value',
+	'fiber/maxstack', 'fiber/new', 'fiber/root', 'fiber/setenv', 'fiber/setmaxstack', 'fiber/status',
+	'fiber?', 'file/close', 'file/flush', 'file/lines', 'file/open', 'file/read', 'file/seek',
+	'file/tell', 'file/temp', 'file/write', 'filewatch/add', 'filewatch/listen', 'filewatch/new',
+	'filewatch/remove', 'filewatch/unlisten', 'filter', 'find', 'find-index', 'first', 'flatten',
+	'flatten-into', 'flush', 'flycheck', 'for', 'forever', 'forv', 'freeze', 'frequencies',
+	'from-pairs', 'function?', 'gccollect', 'gcinterval', 'gcsetinterval', 'generate', 'gensym',
+	'geomean', 'get', 'get-in', 'getline', 'getproto', 'group-by', 'has-key?', 'has-value?', 'hash',
+	'idempotent?', 'identity', 'if-let', 'if-not', 'if-with', 'import', 'import*', 'in', 'inc',
+	'index-of', 'indexed?', 'int/s64', 'int/to-bytes', 'int/to-number', 'int/u64', 'int?',
+	'interleave', 'interpose', 'invert', 'janet/build', 'janet/config-bits', 'janet/version', 'juxt',
+	'juxt*', 'keep', 'keep-syntax', 'keep-syntax!', 'keys', 'keyword', 'keyword/slice', 'keyword?',
+	'kvs', 'label', 'last', 'length', 'lengthable?', 'let', 'load-image', 'load-image-dict', 'loop',
+	'macex', 'macex1', 'maclintf', 'make-env', 'make-image', 'make-image-dict', 'map', 'mapcat',
+	'marshal', 'match', 'math/-inf', 'math/abs', 'math/acos', 'math/acosh', 'math/asin', 'math/asinh',
+	'math/atan', 'math/atan2', 'math/atanh', 'math/cbrt', 'math/ceil', 'math/cos', 'math/cosh',
+	'math/e', 'math/erf', 'math/erfc', 'math/exp', 'math/exp2', 'math/expm1', 'math/floor',
+	'math/frexp', 'math/gamma', 'math/gcd', 'math/hypot', 'math/inf', 'math/int-max', 'math/int-min',
+	'math/int32-max', 'math/int32-min', 'math/lcm', 'math/ldexp', 'math/log', 'math/log-gamma',
+	'math/log10', 'math/log1p', 'math/log2', 'math/nan', 'math/next', 'math/pi', 'math/pow',
+	'math/random', 'math/rng', 'math/rng-buffer', 'math/rng-int', 'math/rng-uniform', 'math/round',
+	'math/seedrandom', 'math/sin', 'math/sinh', 'math/sqrt', 'math/tan', 'math/tanh', 'math/trunc',
+	'max', 'max-of', 'mean', 'memcmp', 'merge', 'merge-into', 'merge-module', 'min', 'min-of', 'mod',
+	'module/add-paths', 'module/cache', 'module/expand-path', 'module/find', 'module/loaders',
+	'module/loading', 'module/paths', 'module/value', 'nan?', 'nat?', 'native', 'neg?', 'net/accept',
+	'net/accept-loop', 'net/address', 'net/address-unpack', 'net/chunk', 'net/close', 'net/connect',
+	'net/flush', 'net/listen', 'net/localname', 'net/peername', 'net/read', 'net/recv-from',
+	'net/send-to', 'net/server', 'net/setsockopt', 'net/shutdown', 'net/write', 'next', 'nil?', 'not',
+	'not=', 'number?', 'odd?', 'one?', 'or', 'os/arch', 'os/cd', 'os/chmod', 'os/clock',
+	'os/compiler', 'os/cpu-count', 'os/cryptorand', 'os/cwd', 'os/date', 'os/dir', 'os/environ',
+	'os/execute', 'os/exit', 'os/getenv', 'os/isatty', 'os/link', 'os/lstat', 'os/mkdir', 'os/mktime',
+	'os/open', 'os/perm-int', 'os/perm-string', 'os/pipe', 'os/posix-exec', 'os/posix-fork',
+	'os/proc-close', 'os/proc-kill', 'os/proc-wait', 'os/readlink', 'os/realpath', 'os/rename',
+	'os/rm', 'os/rmdir', 'os/setenv', 'os/setlocale', 'os/shell', 'os/sigaction', 'os/sleep',
+	'os/spawn', 'os/stat', 'os/strftime', 'os/symlink', 'os/time', 'os/touch', 'os/umask', 'os/which',
+	'pairs', 'parse', 'parse-all', 'parser/byte', 'parser/clone', 'parser/consume', 'parser/eof',
+	'parser/error', 'parser/flush', 'parser/has-more', 'parser/insert', 'parser/new',
+	'parser/produce', 'parser/state', 'parser/status', 'parser/where', 'partial', 'partition',
+	'partition-by', 'peg/compile', 'peg/find', 'peg/find-all', 'peg/match', 'peg/replace',
+	'peg/replace-all', 'pos?', 'postwalk', 'pp', 'prewalk', 'prin', 'prinf', 'print', 'printf',
+	'product', 'prompt', 'propagate', 'protect', 'put', 'put-in', 'quit', 'range', 'reduce',
+	'reduce2', 'repeat', 'repl', 'require', 'resume', 'return', 'reverse', 'reverse!', 'root-env',
+	'run-context', 'sandbox', 'scan-number', 'seq', 'setdyn', 'short-fn', 'signal', 'slice', 'slurp',
+	'some', 'sort', 'sort-by', 'sorted', 'sorted-by', 'spit', 'stderr', 'stdin', 'stdout', 'string',
+	'string/ascii-lower', 'string/ascii-upper', 'string/bytes', 'string/check-set', 'string/find',
+	'string/find-all', 'string/format', 'string/from-bytes', 'string/has-prefix?',
+	'string/has-suffix?', 'string/join', 'string/repeat', 'string/replace', 'string/replace-all',
+	'string/reverse', 'string/slice', 'string/split', 'string/trim', 'string/triml', 'string/trimr',
+	'string?', 'struct', 'struct/getproto', 'struct/proto-flatten', 'struct/rawget',
+	'struct/to-table', 'struct/with-proto', 'struct?', 'sum', 'symbol', 'symbol/slice', 'symbol?',
+	'table', 'table/clear', 'table/clone', 'table/getproto', 'table/new', 'table/proto-flatten',
+	'table/rawget', 'table/setproto', 'table/to-struct', 'table/weak', 'table/weak-keys',
+	'table/weak-values', 'table?', 'tabseq', 'take', 'take-until', 'take-while', 'thaw', 'toggle',
+	'trace', 'tracev', 'true?', 'truthy?', 'try', 'tuple', 'tuple/brackets', 'tuple/join',
+	'tuple/setmap', 'tuple/slice', 'tuple/sourcemap', 'tuple/type', 'tuple?', 'type', 'unless',
+	'unmarshal', 'untrace', 'update', 'update-in', 'use', 'values', 'var-', 'varfn', 'varglobal',
+	'walk', 'warn-compile', 'when', 'when-let', 'when-with', 'with', 'with-dyns', 'with-env',
+	'with-syms', 'with-vars', 'xprin', 'xprinf', 'xprint', 'xprintf', 'yield', 'zero?', 'zipcoll'
+})
+
+lexer.property['scintillua.comment'] = '#'
+
+return lex
diff --git a/lua/lexers/lexer.lua b/lua/lexers/lexer.lua
index 509a5e2..7251ed2 100644
--- a/lua/lexers/lexer.lua
+++ b/lua/lexers/lexer.lua
@@ -1742,6 +1742,7 @@ function M.detect(filename, line)
 	ni = 'inform', --
 	cfg = 'ini', cnf = 'ini', inf = 'ini', ini = 'ini', reg = 'ini', --
 	io = 'io_lang', --
+	janet = 'janet', --
 	bsh = 'java', java = 'java', --
 	js = 'javascript', jsfl = 'javascript', --
 	jq = 'jq', --
@@ -1771,7 +1772,7 @@ function M.detect(filename, line)
 	m = 'objective_c', mm = 'objective_c', objc = 'objective_c', --
 	caml = 'caml', ml = 'caml', mli = 'caml', mll = 'caml', mly = 'caml', --
 	org = 'org', --
-	dpk = 'pascal', dpr = 'pascal', p = 'pascal', pas = 'pascal', --
+	dpk = 'pascal', dpr = 'pascal', p = 'pascal', pas = 'pascal', pp = 'pascal', --
 	al = 'perl', perl = 'perl', pl = 'perl', pm = 'perl', pod = 'perl', --
 	inc = 'php', php = 'php', php3 = 'php', php4 = 'php', phtml = 'php', --
 	p8 = 'pico8', --
@@ -1811,6 +1812,8 @@ function M.detect(filename, line)
 	taskpaper = 'taskpaper', --
 	tcl = 'tcl', tk = 'tcl', --
 	texi = 'texinfo', --
+	['todo.txt'] = 'todotxt', ['Todo.txt'] = 'todotxt', ['done.txt'] = 'todotxt',
+	['Done.txt'] = 'todotxt', --
 	toml = 'toml', --
 	['1'] = 'troff', ['2'] = 'troff', ['3'] = 'troff', ['4'] = 'troff', ['5'] = 'troff',
 	['6'] = 'troff', ['7'] = 'troff', ['8'] = 'troff', ['9'] = 'troff', ['1x'] = 'troff',
diff --git a/lua/lexers/org.lua b/lua/lexers/org.lua
index 4ae38c5..14e2af4 100644
--- a/lua/lexers/org.lua
+++ b/lua/lexers/org.lua
@@ -31,9 +31,9 @@ lex:add_rule('italic', lex:tag('ITALIC', '/' * lexer.word * '/'))
 lex:add_rule('underline', lex:tag('UNDERLINE', '_' * lexer.alnum * '_'))
 
 -- ToDos.
-lex:add_rule('todo', lex:tag('TODO', lex:word_match('TODO')))
+lex:add_rule('settled', lex:tag('SETTLED', lex:word_match('SETTLED')))
 lex:add_rule('done', lex:tag('DONE', lex:word_match('DONE')))
-lex:add_rule('wontfix', lex:tag('WONTFIX', lex:word_match('WONTFIX')))
+lex:add_rule('waiting', lex:tag('WAITING', lex:word_match('WAITING')))
 
 -- DateTime.
 local DD = lexer.digit * lexer.digit
@@ -75,11 +75,11 @@ local block_comment = lexer.range(lexer.starts_line('#+BEGIN_COMMENT'),
 lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment))
 
 -- Word lists.
-lex:set_word_list('TODO', {'TODO', 'DELEGATED', 'WAITING'})
+lex:set_word_list('SETTLED', {'TODO', 'DELEGATED'})
 
-lex:set_word_list('DONE', {'DONE'})
+lex:set_word_list('DONE', {'DONE', 'INVALID', 'WONTFIX'})
 
-lex:set_word_list('WONTFIX', {'WONTFIX', 'INVALID'})
+lex:set_word_list('WAITING', {'WAITING'})
 
 lex:set_word_list('wday', {
 	'Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun', 'Po',
diff --git a/lua/lexers/pascal.lua b/lua/lexers/pascal.lua
index b0c9c8a..0435387 100644
--- a/lua/lexers/pascal.lua
+++ b/lua/lexers/pascal.lua
@@ -54,10 +54,14 @@ local pblock_comment = lexer.range('(*', '*)')
 lex:add_rule('comment', token(lexer.COMMENT, line_comment + bblock_comment + pblock_comment))
 
 -- Numbers.
-lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlDdFf')^-1))
+local hex_num = '$' * lexer.xdigit^1
+local oct_num = '&' * lpeg.R('07')^1
+local bin_num = '%' * S('01')^1
+local integer = hex_num + oct_num + bin_num + lexer.integer
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, S('.,;^@:=<>+-/*()[]')))
+lex:add_rule('operator', token(lexer.OPERATOR, '..' + S('.,;^@:=<>+-/*()[]')))
 
 lexer.property['scintillua.comment'] = '//'
diff --git a/lua/lexers/perl.lua b/lua/lexers/perl.lua
index d6d58a2..6efae38 100644
--- a/lua/lexers/perl.lua
+++ b/lua/lexers/perl.lua
@@ -112,7 +112,7 @@ local plain_var = ('$#' + S('$@%')) * P('$')^0 * lexer.word + '$#'
 lex:add_rule('variable', lex:tag(lexer.VARIABLE, special_var + plain_var))
 
 -- Operators.
-lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('-<>+*!~\\=/%&|^.,?:;()[]{}')))
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, '..' + S('-<>+*!~\\=/%&|^.,?:;()[]{}')))
 
 -- Fold points.
 lex:add_fold_point(lexer.OPERATOR, '[', ']')
diff --git a/lua/lexers/python.lua b/lua/lexers/python.lua
index b9bab9a..5cf936e 100644
--- a/lua/lexers/python.lua
+++ b/lua/lexers/python.lua
@@ -30,10 +30,11 @@ local attr = lex:tag(lexer.ATTRIBUTE, B('.') * lex:word_match(lexer.ATTRIBUTE) +
 lex:add_rule('constant', builtin_const + attr)
 
 -- Strings.
+-- Note: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
 local sq_str = lexer.range("'", true)
 local dq_str = lexer.range('"', true)
 local tq_str = lexer.range("'''") + lexer.range('"""')
-lex:add_rule('string', lex:tag(lexer.STRING, (S('fFrRbBrR') * S('rRfFrRbB') + S('ruRUfFbB'))^-1 *
+lex:add_rule('string', lex:tag(lexer.STRING, (S('rRbBfFtT') * S('rRbBfFtT') + S('rRuUbBfFtT'))^-1 *
 	(tq_str + sq_str + dq_str)))
 
 -- Identifiers.
diff --git a/lua/lexers/ruby.lua b/lua/lexers/ruby.lua
index 2927936..e1bb9be 100644
--- a/lua/lexers/ruby.lua
+++ b/lua/lexers/ruby.lua
@@ -80,7 +80,7 @@ lex:add_rule('symbol', lex:tag(lexer.STRING .. '.symbol', ':' * P(function(input
 	end) * (word_char^1 + sq_str + dq_str)))
 
 -- Operators.
-lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~')))
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, P('...') + '..' + S('!%^&*()[]{}-=+/|:;.,?<>~')))
 
 -- Fold points.
 local function disambiguate(text, pos, line, s)
diff --git a/lua/lexers/rust.lua b/lua/lexers/rust.lua
index c7c3e49..752d646 100644
--- a/lua/lexers/rust.lua
+++ b/lua/lexers/rust.lua
@@ -50,7 +50,7 @@ lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number_('_')))
 lex:add_rule('preprocessor', lex:tag(lexer.PREPROCESSOR, '#' * lexer.range('[', ']', true)))
 
 -- Operators.
-lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=`^~@&|?#~:;,.()[]{}')))
+lex:add_rule('operator', lex:tag(lexer.OPERATOR, '..' + S('+-/*%<>!=`^~@&|?#~:;,.()[]{}')))
 
 -- Fold points.
 lex:add_fold_point(lexer.COMMENT, '/*', '*/')
diff --git a/lua/lexers/todotxt.lua b/lua/lexers/todotxt.lua
new file mode 100644
index 0000000..a6d7565
--- /dev/null
+++ b/lua/lexers/todotxt.lua
@@ -0,0 +1,54 @@
+-- Copyright 2025 Chris Clark and Mitchell. See LICENSE.
+-- todo.txt LPeg lexer.
+-- https://github.com/too-much-todotxt/spec
+
+local lexer = lexer
+local P, S = lpeg.P, lpeg.S
+
+local lex = lexer.new(...)
+
+-- Done/Complete items.
+lex:add_rule('done', lex:tag(lexer.COMMENT, lexer.starts_line(lexer.to_eol('x '))))
+
+-- Priority.
+local priority = P(false)
+for letter in string.gmatch('abcdefghijklmnopqrstuvwxyz', '.') do
+	local tag = lex:tag(lexer.LIST .. '.priority.' .. letter,
+		lexer.starts_line('(' .. letter:upper() .. ') '))
+	priority = priority + tag
+end
+lex:add_rule('priority', priority)
+
+-- URLs, emails, and domain names.
+-- Note: this is not part of todo.txt, but is an extension to make editing cleaner.
+local nonspace = lexer.any - lexer.space
+local email = lex:tag(lexer.LINK,
+	(nonspace - '@')^1 * '@' * (nonspace - '.')^1 * ('.' * (nonspace - S('.?'))^1)^1 *
+	('?' * nonspace^1)^-1)
+local host = lex:tag(lexer.LINK,
+	lexer.word_match('www ftp', true) * (nonspace - '.')^0 * '.' * (nonspace - '.')^1 * '.' *
+	(nonspace - S(',.'))^1)
+local url = lex:tag(lexer.LINK,
+	(nonspace - '://')^1 * '://' * (nonspace - ',' - '.')^1 * ('.' * (nonspace - S(',./?#'))^1)^1 *
+	('/' * (nonspace - S('./?#'))^0 * ('.' * (nonspace - S(',.?#'))^1)^0)^0 *
+	('?' * (nonspace - '#')^1)^-1 * ('#' * nonspace^0)^-1)
+local link = url + host + email
+lex:add_rule('link', link)
+
+-- Key-value pairs.
+local word = (lexer.any - lexer.space - P(':'))^1
+local key = lex:tag(lexer.KEYWORD, word)
+local colon = lex:tag(lexer.OPERATOR, P(':'))
+local value = lex:tag(lexer.STRING, word)
+lex:add_rule('key_value', key * colon * value)
+
+-- Dates.
+lex:add_rule('date', lex:tag(lexer.NUMBER, lexer.digit^4 * P('-') * lexer.digit^2 * P('-') *
+	lexer.digit^2 * (#lexer.space + P(-1))))
+
+-- Project +
+lex:add_rule('project', lex:tag(lexer.LABEL, lexer.range('+', lexer.space, true)))
+-- Context @
+lex:add_rule('context', lex:tag(lexer.TYPE, lexer.range('@', lexer.space, true)))
+
+return lex
diff --git a/lua/lexers/wsf.lua b/lua/lexers/wsf.lua
index 271082d..899a6f9 100644
--- a/lua/lexers/wsf.lua
+++ b/lua/lexers/wsf.lua
@@ -54,10 +54,10 @@ local general = lex:tag(lexer.CONSTANT .. '.entity', '&' * identifier * ';')
 lex:add_rule('entity', predefined + general)
 
 -- Fold points.
+lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
 local function disambiguate_lt(text, pos, line, s)
 	return not line:find('^</', s) and 1 or -1
 end
 lex:add_fold_point(lexer.TAG, '<', disambiguate_lt)
 lex:add_fold_point(lexer.TAG, '/>', -1)
-lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
 
 -- Finally, add JavaScript and VBScript as embedded languages
diff --git a/lua/lexers/xml.lua b/lua/lexers/xml.lua
index 8f297e9..64932cc 100644
--- a/lua/lexers/xml.lua
+++ b/lua/lexers/xml.lua
@@ -61,11 +61,12 @@ local general = lex:tag(lexer.CONSTANT .. '.entity', '&' * identifier * ';')
 lex:add_rule('entity', predefined + general)
 
 -- Fold Points.
+lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
+lex:add_fold_point('cdata', '<![CDATA[', ']]>')
 local function disambiguate_lt(text, pos, line, s)
 	return not line:find('^</', s) and 1 or -1
 end
 lex:add_fold_point(lexer.TAG, '<', disambiguate_lt)
 lex:add_fold_point(lexer.TAG, '/>', -1)
-lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
-lex:add_fold_point('cdata', '<![CDATA[', ']]>')
+lex:add_fold_point(lexer.TAG, '?>', -1)
 
 lexer.property['scintillua.comment'] = '<!--|-->'
 lexer.property['scintillua.angle.braces'] = '1'
diff --git a/lua/lexers/zig.lua b/lua/lexers/zig.lua
index 695613f..49be636 100644
--- a/lua/lexers/zig.lua
+++ b/lua/lexers/zig.lua
@@ -83,7 +83,7 @@ lex:add_rule('comment', token(lexer.COMMENT, doc_comment + comment))
 lex:add_rule('number', token(lexer.NUMBER, lexer.number))
 
 -- Operators.
-lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;,.()[]{}')))
+lex:add_rule('operator', token(lexer.OPERATOR, '..' + S('+-/*%<>!=^&|?~:;,.()[]{}')))
 
 -- Fold points.
 lex:add_fold_point(lexer.OPERATOR, '{', '}')
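
Note on the recurring operator-rule change: prepending '..' to the single-character set is an LPeg ordered choice, so the two-character range/concatenation operator is tried first and lexes as one token instead of two '.' operators. A minimal standalone Lua/LPeg sketch of the idea (illustration only, not part of this commit):

local lpeg = require('lpeg')

-- Old rule shape: single characters only, so '..' lexes as two '.' operators.
local op_old = lpeg.S(':;=<>&+-*/.()')
-- New rule shape: try the longer literal first, then fall back to single characters.
local op_new = lpeg.P('..') + lpeg.S(':;=<>&+-*/.()')

print(lpeg.match(op_old, '..')) --> 2 (only the first '.' consumed)
print(lpeg.match(op_new, '..')) --> 3 (the whole '..' consumed as one operator)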
