From 3570869c9ae2c4df14b15423789919e514322916 Mon Sep 17 00:00:00 2001
From: Marc André Tanner
Date: Wed, 7 Dec 2016 16:49:29 +0100
Subject: Move all lua related files to lua/ subfolder

Also remove the lexers subdirectory from the Lua search path. As a
result we attempt to open fewer files during startup:

  $ strace -e open -o log ./vis +q config.h && wc -l log

In order to avoid having to modify all lexers which `require('lexer')`
we instead place a symlink in the top level directory.

Running

  $ ./configure --disable-lua
  $ rm -rf lua

should result in a source tree with most Lua-specific functionality
removed.
---
 lexers/pure.lua | 62 ---------------------------------------------------------
 1 file changed, 62 deletions(-)
 delete mode 100644 lexers/pure.lua

diff --git a/lexers/pure.lua b/lexers/pure.lua
deleted file mode 100644
index 256a2dc..0000000
--- a/lexers/pure.lua
+++ /dev/null
@@ -1,62 +0,0 @@
--- Copyright 2015-2016 David B. Lamkins. See LICENSE.
--- pure LPeg lexer, see http://purelang.bitbucket.org/
-
-local l = require('lexer')
-local token, word_match = l.token, l.word_match
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
-
-local M = {_NAME = 'pure'}
-
--- Whitespace.
-local ws = token(l.WHITESPACE, l.space^1)
-
--- Comments.
-local line_comment = '//' * l.nonnewline^0
-local block_comment = '/*' * (l.any - '*/')^0 * P('*/')^-1
-local comment = token(l.COMMENT, line_comment + block_comment)
-
--- Strings.
-local string = token(l.STRING, l.delimited_range('"', true))
-
--- Numbers.
-local bin = '0' * S('Bb') * S('01')^1
-local hex = '0' * S('Xx') * (R('09') + R('af') + R('AF'))^1
-local dec = R('09')^1
-local int = (bin + hex + dec) * P('L')^-1
-local rad = P('.') - P('..')
-local exp = (S('Ee') * S('+-')^-1 * int)^-1
-local flt = int * (rad * dec)^-1 * exp + int^-1 * rad * dec * exp
-local number = token(l.NUMBER, flt + int)
-
--- Keywords.
-local keyword = token(l.KEYWORD, word_match{
-  'namespace', 'with', 'end', 'using', 'interface', 'extern', 'let', 'const',
-  'def', 'type', 'public', 'private', 'nonfix', 'outfix', 'infix', 'infixl',
-  'infixr', 'prefix', 'postfix', 'if', 'otherwise', 'when', 'case', 'of',
-  'then', 'else'
-})
-
--- Identifiers.
-local identifier = token(l.IDENTIFIER, l.word)
-
--- Operators.
-local punct = S('+-/*%<>~!=^&|?~:;,.()[]{}@#$`\\\'')
-local dots = P('..')
-local operator = token(l.OPERATOR, dots + punct)
-
--- Pragmas.
-local hashbang = l.starts_line('#!') * (l.nonnewline - P('//'))^0
-local pragma = token(l.PREPROCESSOR, hashbang)
-
-M._rules = {
-  {'whitespace', ws},
-  {'comment', comment},
-  {'pragma', pragma},
-  {'keyword', keyword},
-  {'number', number},
-  {'operator', operator},
-  {'identifier', identifier},
-  {'string', string},
-}
-
-return M
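
To illustrate the search-path arrangement described in the commit message, the
sketch below shows how a single lua/ entry on package.path can serve both
`require('lexer')` (resolved through the top-level lexer.lua symlink) and the
individual lexers kept under lexers/. This is a minimal sketch, not code from
this patch; the install prefix and the `lexers.<name>` module naming are
assumptions made for illustration.

  -- Minimal sketch, assuming the Lua files are installed under
  -- /usr/local/share/vis and that lexers are addressed as 'lexers.<name>'.
  local base = '/usr/local/share/vis'   -- assumed location of the lua/ files
  local path = base .. '/?.lua'         -- one search-path entry, no lexers/ entry

  -- package.searchpath reports which file a require() call would open,
  -- without loading it; it returns nil plus the tried paths if none exists.
  print(package.searchpath('lexer', path))        -- <base>/lexer.lua (the symlink)
  print(package.searchpath('lexers.pure', path))  -- <base>/lexers/pure.lua

With such a layout, `require('lexer')` inside each lexer keeps resolving without
any modification to the lexer files, which is what the top-level symlink is for.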