-- Dart LPeg lexer.
-- Written by Brian Schott (@Hackerpilot on Github).

local l = require('lexer')
local token, word_match = l.token, l.word_match
local P, R, S = lpeg.P, lpeg.R, lpeg.S

local M = {_NAME = 'dart'}

-- Whitespace.
local ws = token(l.WHITESPACE, l.space^1)

-- Comments.
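-- Dart block comments may nest, so a nested '/*' '*/' pair is used rather than
-- a flat delimited range.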
local line_comment = '//' * l.nonnewline_esc^0
local nested_comment = l.nested_pair('/*', '*/')
local comment = token(l.COMMENT, line_comment + nested_comment)

-- Strings.
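-- Dart strings take an optional 'r' (raw) prefix; triple-quoted strings may
-- span multiple lines.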
local sq_str = S('r')^-1 * l.delimited_range("'", true)
local dq_str = S('r')^-1 * l.delimited_range('"', true)
local sq_str_multiline = S('r')^-1 * l.delimited_range("'''")
local dq_str_multiline = S('r')^-1 * l.delimited_range('"""')
local string = token(l.STRING,
                     sq_str + dq_str + sq_str_multiline + dq_str_multiline)

-- Numbers.
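-- Dart number literals are decimal integers, hexadecimal integers, and doubles.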
local number = token(l.NUMBER, l.float + l.hex_num + l.dec_num)

-- Keywords.
local keyword = token(l.KEYWORD, word_match{
  'assert', 'break', 'case', 'catch', 'class', 'const', 'continue', 'default',
  'do', 'else', 'enum', 'extends', 'false', 'final', 'finally', 'for', 'if',
  'in', 'is', 'new', 'null', 'rethrow', 'return', 'super', 'switch', 'this',
  'throw', 'true', 'try', 'var', 'void', 'while', 'with',
})

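-- Built-in identifiers.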
local builtin_identifiers = token(l.CONSTANT, word_match{
  'abstract', 'as', 'dynamic', 'export', 'external', 'factory', 'get',
  'implements', 'import', 'library', 'operator', 'part', 'set', 'static',
  'typedef'
})

-- Identifiers.
local identifier = token(l.IDENTIFIER, l.word)

-- Operators.
local operator = token(l.OPERATOR, S('#?=!<>+-*$/%&|^~.,;()[]{}'))

-- Annotations.
local annotation = token('annotation', '@' * l.word^1)

M._rules = {
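  -- Rules are tried in order, so keywords and built-ins match before plain identifiers.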
  {'whitespace', ws},
  {'keyword', keyword},
  {'constant', builtin_identifiers},
  {'string', string},
  {'identifier', identifier},
  {'comment', comment},
  {'number', number},
  {'operator', operator},
  {'annotation', annotation},
}

M._tokenstyles = {
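  -- Style the custom 'annotation' token like a preprocessor directive.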
  annotation = l.STYLE_PREPROCESSOR,
}

M._foldsymbols = {
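  -- Fold on braces and block comments; '//' folds runs of line comments.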
  _patterns = {'[{}]', '/%*', '%*/', '//'},
  [l.OPERATOR] = {['{'] = 1, ['}'] = -1},
  [l.COMMENT] = {
    ['/*'] = 1, ['*/'] = -1,
    ['//'] = l.fold_line_comments('//')
  }
}

return M