-- Copyright 2006-2017 Mitchell mitchell.att.foicica.com. See LICENSE.
-- Erlang LPeg lexer.

local lpeg = require('lpeg')
local l = require('lexer')
local token, word_match = l.token, l.word_match
local P, S = lpeg.P, lpeg.S

local M = {_NAME = 'erlang'}

-- Whitespace.
local ws = token(l.WHITESPACE, l.space^1)

-- Comments.
local comment = token(l.COMMENT, '%' * l.nonnewline^0)

-- Strings.
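-- delimited_range() honors backslash escapes, so an embedded \" does not
-- terminate the string.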
local string = token(l.STRING, l.delimited_range('"'))

-- Numbers.
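-- $-prefixed character literals (e.g. $a, $\n) are integers in Erlang, so
-- they are lexed as numbers here. Radix literals like 16#7F are not handled
-- specially and lex piecewise.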
local const_char = '$' * (('\\' * l.ascii) + l.any)
local number = token(l.NUMBER, const_char + l.float + l.integer)

-- Atoms.
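-- Unquoted atoms are lowercase-initial words (e.g. ok, badarg); quoted atoms
-- use single quotes (e.g. 'EXIT'). Atoms take the LABEL style so they stand
-- apart from variables and functions.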
local atom_pat = (l.lower * (l.alnum + '_')^0) + l.delimited_range("'")
local atom = token(l.LABEL, atom_pat)

-- Functions.
-- An atom immediately followed by '(' is highlighted as a function; the
-- lookahead consumes nothing, so the argument list still lexes normally.
local func = token(l.FUNCTION, atom_pat * #P('('))

-- Keywords.
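-- Erlang's reserved words; the word-form operators (div, rem, band, ...) are
-- handled with the other operators below.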
local keyword = token(l.KEYWORD, word_match{
  'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if', 'let', 'of',
  'query', 'receive', 'try', 'when'
})

-- Identifiers.
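-- Erlang variables start with an uppercase letter or '_', e.g. Acc, _Ignored.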
local identifier = token(l.IDENTIFIER, ((l.upper + '_') * (l.alnum + '_')^0))

-- Operators.
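-- Single-character symbols plus the word-form operators; word_match only
-- matches whole words, so 'division' is not mistaken for 'div'.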
local named_operator = word_match{
  'div', 'rem', 'or', 'xor', 'bor', 'bxor', 'bsl', 'bsr', 'and', 'band', 'not',
  'bnot', 'andalso', 'orelse'
}
local operator = token(l.OPERATOR, S('-<>.;=/|#+*:,?!()[]{}') + named_operator)

-- Directives.
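-- Module attributes and preprocessor directives, e.g. -module(foo). and
-- -define(PI, 3.14159).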
local directive = token('directive', '-' * word_match{
  'author', 'behavior', 'behaviour', 'callback', 'compile', 'copyright',
  'define', 'doc', 'else', 'endif', 'export', 'export_type', 'file', 'ifdef',
  'ifndef', 'import', 'include_lib', 'include', 'module', 'on_load', 'opaque',
  'record', 'spec', 'type', 'undef', 'vsn'
})

-- Rules are tried in order at each position; 'directive' must come before
-- 'operator', or '-module' would lex as a '-' operator followed by an atom.
M._rules = {
  {'whitespace', ws},
  {'keyword', keyword},
  {'function', func},
  {'directive', directive},
  {'operator', operator},
  {'atom', atom},
  {'identifier', identifier},
  {'string', string},
  {'comment', comment},
  {'number', number}
}

M._tokenstyles = {
  directive = l.STYLE_PREPROCESSOR
}

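-- Fold points: '_patterns' lists what to scan for on each line; each match
-- found in the token tables below adjusts the fold level by the given delta.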
M._foldsymbols = {
  _patterns = {'[a-z]+', '[%(%)%[%]{}]', '%%'},
  [l.KEYWORD] = {
    begin = 1, case = 1, fun = 1, ['if'] = 1, query = 1, receive = 1,
    try = 1, ['end'] = -1
  },
  [l.OPERATOR] = {
    ['('] = 1, [')'] = -1, ['['] = 1, [']'] = -1, ['{'] = 1, ['}'] = -1
  },
  [l.COMMENT] = {['%'] = l.fold_line_comments('%')}
}

return M
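
-- A minimal usage sketch, assuming the legacy stand-alone Scintillua API in
-- which lexer.load() returns this module table and lexer.lex() returns a
-- flat list of alternating token names and end positions. Kept in comments,
-- since Lua allows no statements after the module's final return:
--
--   local lexer = require('lexer')
--   local erlang = lexer.load('erlang')
--   local tokens = lexer.lex(erlang, 'foo() -> ok.\n')
--   for i = 1, #tokens, 2 do print(tokens[i], tokens[i + 1]) end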