File: scala.lua

package: vis 0.9-1
  • area: main
  • in suites: forky, sid, trixie
  • size: 12,624 kB
  • sloc: ansic: 23,195; sh: 981; makefile: 363; python: 47
file content: 61 lines, 2,026 bytes
-- Copyright 2006-2024 JMS. See LICENSE.
-- Scala LPeg lexer.

local lexer = require('lexer')
local token, word_match = lexer.token, lexer.word_match
local P, S = lpeg.P, lpeg.S

local lex = lexer.new('scala')

-- Whitespace.
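-- `ws` is kept as a local so the class rule below can reuse it.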
local ws = token(lexer.WHITESPACE, lexer.space^1)
lex:add_rule('whitespace', ws)

-- Classes.
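-- The `class` keyword, whitespace, then a word tagged as a class name.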
lex:add_rule('class', token(lexer.KEYWORD, 'class') * ws^1 * token(lexer.CLASS, lexer.word))

-- Keywords.
lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
  'abstract', 'case', 'catch', 'class', 'def', 'do', 'else', 'extends', 'false', 'final', 'finally',
  'for', 'forSome', 'if', 'implicit', 'import', 'lazy', 'match', 'new', 'null', 'object',
  'override', 'package', 'private', 'protected', 'return', 'sealed', 'super', 'this', 'throw',
  'trait', 'try', 'true', 'type', 'val', 'var', 'while', 'with', 'yield'
}))

-- Types.
lex:add_rule('type', token(lexer.TYPE, word_match{
  'Array', 'Boolean', 'Buffer', 'Byte', 'Char', 'Collection', 'Double', 'Float', 'Int', 'Iterator',
  'LinkedList', 'List', 'Long', 'Map', 'None', 'Option', 'Set', 'Short', 'SortedMap', 'SortedSet',
  'String', 'TreeMap', 'TreeSet'
}))

-- Functions.
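-- A word is treated as a function only when followed by '('; the `#P('(')` lookahead
-- does not consume the parenthesis.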
lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('('))

-- Identifiers.
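-- Fallback for words not matched by the keyword, type, or function rules above
-- (rules are tried in the order they are added).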
lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))

-- Strings.
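-- Symbol literals ('name), double-quoted strings that must end on the same line, and
-- triple-quoted ("""...""") strings. The triple-quoted pattern is tried first so that
-- an opening """ is not matched as an empty "" string.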
local symbol = "'" * lexer.word
local dq_str = lexer.range('"', true)
local tq_str = lexer.range('"""')
lex:add_rule('string', token(lexer.STRING, tq_str + symbol + dq_str))

-- Comments.
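-- `//` line comments run to the end of the line; `/* ... */` block comments may span
-- multiple lines.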
local line_comment = lexer.to_eol('//', true)
local block_comment = lexer.range('/*', '*/')
lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))

-- Numbers.
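-- Integer and float literals, with an optional type suffix (L, F, or D, in either case).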
lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlFfDd')^-1))

-- Operators.
lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}')))

-- Fold points.
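-- Allow folding on matching braces and on block comments.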
lex:add_fold_point(lexer.OPERATOR, '{', '}')
lex:add_fold_point(lexer.COMMENT, '/*', '*/')

lexer.property['scintillua.comment'] = '//'
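-- The 'scintillua.comment' property tells editors which prefix to use when toggling line comments.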

return lex