From 3570869c9ae2c4df14b15423789919e514322916 Mon Sep 17 00:00:00 2001
From: Marc André Tanner
Date: Wed, 7 Dec 2016 16:49:29 +0100
Subject: Move all Lua-related files to lua/ subfolder

Also remove the lexers subdirectory from the Lua search path. As a
result we attempt to open fewer files during startup:

 $ strace -e open -o log ./vis +q config.h && wc -l log

To avoid having to modify all lexers which call `require('lexer')`, we
instead place a symlink in the top-level directory.

Running

 $ ./configure --disable-lua
 $ rm -rf lua

should result in a source tree with most Lua-specific functionality
removed.
---
 lexers/toml.lua | 68 ---------------------------------------------------------
 1 file changed, 68 deletions(-)
 delete mode 100644 lexers/toml.lua

(limited to 'lexers/toml.lua')

diff --git a/lexers/toml.lua b/lexers/toml.lua
deleted file mode 100644
index 5a27c97..0000000
--- a/lexers/toml.lua
+++ /dev/null
@@ -1,68 +0,0 @@
--- Copyright 2015-2016 Alejandro Baez (https://keybase.io/baez). See LICENSE.
--- TOML LPeg lexer.
-
-local l = require("lexer")
-local token, word_match = l.token, l.word_match
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
-
-local M = {_NAME = 'toml'}
-
--- Whitespace
-local indent = #l.starts_line(S(' \t')) *
-               (token(l.WHITESPACE, ' ') + token('indent_error', '\t'))^1
-local ws = token(l.WHITESPACE, S(' \t')^1 + l.newline^1)
-
--- Comments.
-local comment = token(l.COMMENT, '#' * l.nonnewline^0)
-
--- Strings.
-local string = token(l.STRING, l.delimited_range("'") + l.delimited_range('"'))
-
--- Numbers.
-local number = token(l.NUMBER, l.float + l.integer)
-
--- Datetime.
-local ts = token('timestamp', l.digit * l.digit * l.digit * l.digit * -- year
-                   '-' * l.digit * l.digit^-1 * -- month
-                   '-' * l.digit * l.digit^-1 * -- day
-                   ((S(' \t')^1 + S('tT'))^-1 * -- separator
-                   l.digit * l.digit^-1 * -- hour
-                   ':' * l.digit * l.digit * -- minute
-                   ':' * l.digit * l.digit * -- second
-                   ('.' * l.digit^0)^-1 * -- fraction
-                   ('Z' + -- timezone
-                     S(' \t')^0 * S('-+') * l.digit * l.digit^-1 *
-                     (':' * l.digit * l.digit)^-1)^-1)^-1)
-
--- Keywords.
-local keyword = token(l.KEYWORD, word_match{
-  'true', 'false'
-})
-
-
--- Identifiers.
-local identifier = token(l.IDENTIFIER, l.word)
-
--- Operators.
-local operator = token(l.OPERATOR, S('#=+-,.{}[]()'))
-
-M._rules = {
-  {'indent', indent},
-  {'whitespace', ws},
-  {'keyword', keyword},
-  {'identifier', identifier},
-  {'operator', operator},
-  {'string', string},
-  {'comment', comment},
-  {'number', number},
-  {'timestamp', ts},
-}
-
-M._tokenstyles = {
-  indent_error = 'back:red',
-  timestamp = l.STYLE_NUMBER,
-}
-
-M._FOLDBYINDENTATION = true
-
-return M
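
The compatibility symlink mentioned in the commit message does not appear in
this filtered diff. A minimal sketch of the idea, assuming the lexer module
ends up at lua/lexers/lexer.lua and that lua/ (but not lua/lexers/) stays on
the Lua search path (the exact paths are assumptions, not taken from this
patch):

 $ ln -s lexers/lexer.lua lua/lexer.lua

With such a link in place, the `require('lexer')` at the top of each lexer,
as in the deleted toml.lua above, can still resolve even though the lexers
subdirectory was dropped from the search path.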