about summary refs log tree commit diff
path: root/lua/lexers/snobol4.lua
diff options
context:
space:
mode:
Diffstat (limited to 'lua/lexers/snobol4.lua')
-rw-r--r-- lua/lexers/snobol4.lua | 113
1 file changed, 59 insertions(+), 54 deletions(-)
diff --git a/lua/lexers/snobol4.lua b/lua/lexers/snobol4.lua
index edf085b..0293370 100644
--- a/lua/lexers/snobol4.lua
+++ b/lua/lexers/snobol4.lua
@@ -1,64 +1,69 @@
--- Copyright 2013-2017 Michael T. Richter. See LICENSE.
+-- Copyright 2013-2022 Michael T. Richter. See LICENSE.
-- SNOBOL4 lexer.
-- This lexer works with classic SNOBOL4 as well as the CSNOBOL4 extensions.
-local l = require 'lexer'
-local token, word_match = l.token, l.word_match
-local B, P, R, S, V = lpeg.B, lpeg.P, lpeg.R, lpeg.S, lpeg.V
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local B, P, S = lpeg.B, lpeg.P, lpeg.S
-local M = { _NAME = 'snobol4' }
+local lex = lexer.new('snobol4')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match({
+ 'ABORT', 'ARRAY', 'CONTINUE', 'DEFINE', 'END', 'FRETURN', 'INPUT', 'NRETURN', 'OUTPUT', 'PUNCH',
+ 'RETURN', 'SCONTINUE', 'TABLE'
+}, true) + '&' * lexer.word))
-- Helper patterns.
-local dotted_id = l.word * (P'.' * l.word)^0
+local dotted_id = lexer.word * ('.' * lexer.word)^0
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, lexer.starts_line(dotted_id)))
-local dq_str = l.delimited_range('"', true, true)
-local sq_str = l.delimited_range("'", true, true)
+-- Targets.
+local branch = B(lexer.space * ':(') * dotted_id * #P(')')
+local sbranch = B(lexer.space * ':' * S('SsFf') * '(') * dotted_id * #P(')')
+local sbranchx = B(')' * S('SsFf') * '(') * dotted_id * #P(')')
+lex:add_rule('target', token(lexer.LABEL, branch + sbranch + sbranchx))
-local branch = B(l.space * P':(') * dotted_id * #P')'
-local sbranch = B(l.space * P':' * S'SF' * '(') * dotted_id * #P')'
-local sbranchx = B(P')' * S'SF' * P'(') * dotted_id * #P')'
+-- Patterns.
+lex:add_rule('pattern', lexer.token(lexer.CLASS, word_match({
+ -- Keep distinct.
+ 'ABORT', 'ANY', 'ARB', 'ARBNO', 'BAL', 'BREAK', 'BREAKX', 'FAIL', 'FENCE', 'LEN', 'NOTANY', 'POS',
+ 'REM', 'RPOS', 'RTAB', 'SPAN', 'SUCCEED', 'TAB'
+}, true) * #P('(')))
-- Token definitions.
-local bif = token(l.FUNCTION, l.word_match({
- 'APPLY', 'ARRAY', 'CHAR', 'CONVERT', 'COPY', 'DATA', 'DATE', 'DIFFER', 'DUPL',
- 'EQ', 'EVAL', 'FILE_ABSPATH', 'FILE_ISDIR', 'FREEZE', 'FUNCTION', 'GE', 'GT',
- 'HOST', 'IDENT', 'INTEGER', 'IO_FINDUNIT', 'ITEM', 'LABEL', 'LOAD', 'LPAD',
- 'LE', 'LGT', 'LT', 'NE', 'OPSYN', 'ORD', 'PROTOTYPE', 'REMDR', 'REPLACE',
- 'REVERSE', 'RPAD', 'RSORT', 'SERV_LISTEN', 'SET', 'SETEXIT', 'SIZE', 'SORT',
- 'SQRT', 'SSET', 'SUBSTR', 'TABLE', 'THAW', 'TIME', 'TRACE', 'TRIM', 'UNLOAD',
- 'VALUE', 'VDIFFER',
-}, '', true) * #l.delimited_range('()', false, true, true))
-local comment = token(l.COMMENT, l.starts_line(S'*#|;!' * l.nonnewline^0))
-local control = token(l.PREPROCESSOR, l.starts_line(P'-' * l.word))
-local identifier = token(l.DEFAULT, dotted_id)
-local keyword = token(l.KEYWORD, l.word_match({
- 'ABORT', 'ARRAY', 'CONTINUE', 'DEFINE', 'END', 'FRETURN', 'INPUT', 'NRETURN',
- 'OUTPUT', 'PUNCH', 'RETURN', 'SCONTINUE', 'TABLE',
-}, '', true) + P'&' * l.word)
-local label = token(l.LABEL, l.starts_line(dotted_id))
-local number = token(l.NUMBER, l.float + l.integer)
-local operator = token(l.OPERATOR, S'¬?$.!%*/#+-@⊥&^~\\=')
-local pattern = l.token(l.CLASS, l.word_match({ -- "class" to keep distinct
- 'ABORT', 'ANY', 'ARB', 'ARBNO', 'BAL', 'BREAK', 'BREAKX', 'FAIL', 'FENCE',
- 'LEN', 'NOTANY', 'POS', 'REM', 'RPOS', 'RTAB', 'SPAN', 'SUCCEED', 'TAB',
-}, '', true) * #l.delimited_range('()', false, true, true))
-local str = token(l.STRING, sq_str + dq_str)
-local target = token(l.LABEL, branch + sbranch + sbranchx)
-local ws = token(l.WHITESPACE, l.space^1)
-
-M._rules = {
- { 'comment', comment },
- { 'control', control },
- { 'string', str },
- { 'number', number },
- { 'keyword', keyword },
- { 'label', label },
- { 'target', target },
- { 'pattern', pattern },
- { 'built-in', bif },
- { 'operator', operator },
- { 'identifier', identifier },
- { 'whitespace', ws },
-}
-
-return M
+lex:add_rule('built-in', token(lexer.FUNCTION, word_match({
+ 'APPLY', 'ARRAY', 'CHAR', 'CONVERT', 'COPY', 'DATA', 'DATE', 'DIFFER', 'DUPL', 'EQ', 'EVAL',
+ 'FILE_ABSPATH', 'FILE_ISDIR', 'FREEZE', 'FUNCTION', 'GE', 'GT', 'HOST', 'IDENT', 'INTEGER',
+ 'IO_FINDUNIT', 'ITEM', 'LABEL', 'LOAD', 'LPAD', 'LE', 'LGT', 'LT', 'NE', 'OPSYN', 'ORD',
+ 'PROTOTYPE', 'REMDR', 'REPLACE', 'REVERSE', 'RPAD', 'RSORT', 'SERV_LISTEN', 'SET', 'SETEXIT',
+ 'SIZE', 'SORT', 'SQRT', 'SSET', 'SUBSTR', 'TABLE', 'THAW', 'TIME', 'TRACE', 'TRIM', 'UNLOAD',
+ 'VALUE', 'VDIFFER'
+}, true) * #P('(')))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.DEFAULT, dotted_id))
+
+-- Strings.
+local dq_str = lexer.range('"', true, false)
+local sq_str = lexer.range("'", true, false)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, lexer.starts_line(lexer.to_eol(S('*#|;!')))))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.number))
+
+-- Control.
+lex:add_rule('control', token(lexer.PREPROCESSOR, lexer.starts_line('-' * lexer.word)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S'¬?$.!%*/#+-@⊥&^~\\='))
+
+return lex