author    Marc André Tanner <mat@brain-dump.org>    2015-10-16 12:36:47 +0200
committer Marc André Tanner <mat@brain-dump.org>    2015-11-08 13:35:36 +0100
commit    039042f2e323c1f982f1de61b702c88fb33d6ccb (patch)
tree      67dea69de9462e0c27ea2a743b4c5d1798eaa057 /lexers/toml.lua
parent    b1ec60061623601ca6185a16d77c6c6c62135e95 (diff)
download  vis-039042f2e323c1f982f1de61b702c88fb33d6ccb.tar.gz
          vis-039042f2e323c1f982f1de61b702c88fb33d6ccb.tar.xz
Import LPeg based lexers from Scintillua 3.6.1-1
These are Copyright (c) 2007-2015 Mitchell and released under the MIT license.
Diffstat (limited to 'lexers/toml.lua')
-rw-r--r--  lexers/toml.lua  68
1 file changed, 68 insertions, 0 deletions
diff --git a/lexers/toml.lua b/lexers/toml.lua
new file mode 100644
index 0000000..d7d2960
--- /dev/null
+++ b/lexers/toml.lua
@@ -0,0 +1,68 @@
+-- Copyright 2015 Alejandro Baez (https://twitter.com/a_baez). See LICENSE.
+-- TOML LPeg lexer.
+
+local l = require("lexer")
+local token, word_match = l.token, l.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'toml'}
+
+-- Whitespace
+local indent = #l.starts_line(S(' \t')) *
+ (token(l.WHITESPACE, ' ') + token('indent_error', '\t'))^1
+local ws = token(l.WHITESPACE, S(' \t')^1 + l.newline^1)
+
+-- Comments.
+local comment = token(l.COMMENT, '#' * l.nonnewline^0)
+
+-- Strings.
+local string = token(l.STRING, l.delimited_range("'") + l.delimited_range('"'))
+
+-- Numbers.
+local number = token(l.NUMBER, l.float + l.integer)
+
+-- Datetime.
+local ts = token('timestamp', l.digit * l.digit * l.digit * l.digit * -- year
+ '-' * l.digit * l.digit^-1 * -- month
+ '-' * l.digit * l.digit^-1 * -- day
+ ((S(' \t')^1 + S('tT'))^-1 * -- separator
+ l.digit * l.digit^-1 * -- hour
+ ':' * l.digit * l.digit * -- minute
+ ':' * l.digit * l.digit * -- second
+ ('.' * l.digit^0)^-1 * -- fraction
+ ('Z' + -- timezone
+ S(' \t')^0 * S('-+') * l.digit * l.digit^-1 *
+ (':' * l.digit * l.digit)^-1)^-1)^-1)
+
+-- Keywords.
+local keyword = token(l.KEYWORD, word_match{
+ 'true', 'false'
+})
+
+
+-- Identifiers.
+local identifier = token(l.IDENTIFIER, l.word)
+
+-- Operators.
+local operator = token(l.OPERATOR, S('#=+-,.{}[]()'))
+
+M._rules = {
+ {'indent', indent},
+ {'whitespace', ws},
+ {'keyword', keyword},
+ {'identifier', identifier},
+ {'operator', operator},
+ {'string', string},
+ {'comment', comment},
+ {'number', number},
+ {'timestamp', ts},
+}
+
+M._tokenstyles = {
+ indent_error = 'back:%(color.red)',
+ timestamp = l.STYLE_NUMBER,
+}
+
+M._FOLDBYINDENTATION = true
+
+return M
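
For reference, the timestamp pattern added above can be exercised with plain LPeg outside the lexer framework. This is a minimal standalone sketch, not part of the commit; the l.digit class is approximated here as lpeg.R('09'):

-- Standalone sketch of the datetime pattern (assumption: l.digit == lpeg.R('09')).
local lpeg = require('lpeg')
local P, R, S = lpeg.P, lpeg.R, lpeg.S
local digit = R('09')
local ts = digit * digit * digit * digit *              -- year
  '-' * digit * digit^-1 *                              -- month
  '-' * digit * digit^-1 *                              -- day
  ((S(' \t')^1 + S('tT'))^-1 *                          -- separator
   digit * digit^-1 * ':' * digit * digit *             -- hour:minute
   ':' * digit * digit * ('.' * digit^0)^-1 *           -- :second.fraction
   ('Z' + S(' \t')^0 * S('-+') * digit * digit^-1 *
    (':' * digit * digit)^-1)^-1)^-1                     -- timezone
-- A full RFC 3339 datetime is consumed; match returns the position after it.
print(ts:match('1979-05-27T07:32:00Z'))  --> 21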