File: xtend.lua

-- Copyright (c) 2014-2024 Piotr Orzechowski [drzewo.org]. See LICENSE.
-- Xtend LPeg lexer.

local lexer = require('lexer')
local token, word_match = lexer.token, lexer.word_match
local P, S = lpeg.P, lpeg.S

local lex = lexer.new('xtend')

-- Whitespace.
local ws = token(lexer.WHITESPACE, lexer.space^1)
lex:add_rule('whitespace', ws)

-- Classes.
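-- Highlight the word following the 'class' keyword as a class name.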
lex:add_rule('class', token(lexer.KEYWORD, 'class') * ws^1 * token(lexer.CLASS, lexer.word))

-- Keywords.
lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
  -- General.
  'abstract', 'annotation', 'as', 'case', 'catch', 'class', 'create', 'def', 'default', 'dispatch',
  'do', 'else', 'enum', 'extends', 'extension', 'final', 'finally', 'for', 'if', 'implements',
  'import', 'interface', 'instanceof', 'it', 'new', 'override', 'package', 'private', 'protected',
  'public', 'return', 'self', 'static', 'super', 'switch', 'synchronized', 'this', 'throw',
  'throws', 'try', 'typeof', 'val', 'var', 'while',
  -- Templates.
  'AFTER', 'BEFORE', 'ENDFOR', 'ENDIF', 'FOR', 'IF', 'SEPARATOR',
  -- Literals.
  'true', 'false', 'null'
}))

-- Types.
lex:add_rule('type', token(lexer.TYPE, word_match{
  'boolean', 'byte', 'char', 'double', 'float', 'int', 'long', 'short', 'void', 'Boolean', 'Byte',
  'Character', 'Double', 'Float', 'Integer', 'Long', 'Short', 'String'
}))

-- Functions.
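-- A word immediately followed by '(' is highlighted as a function; the lookahead does not consume the parenthesis.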
lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('('))

-- Identifiers.
lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))

-- Templates.
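-- Xtend template expressions (rich strings) are delimited by triple single quotes (''').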
lex:add_rule('template', token(lexer.EMBEDDED, lexer.range("'''")))

-- Strings.
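-- Single- and double-quoted strings must end on the same line.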
local sq_str = lexer.range("'", true)
local dq_str = lexer.range('"', true)
lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))

-- Comments.
local line_comment = lexer.to_eol('//', true)
local block_comment = lexer.range('/*', '*/')
lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))

-- Numbers.
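-- Suffixes: l/L (long), bi/BI (BigInteger), d/D (double), f/F (float), bd/BD (BigDecimal).
-- Hexadecimal literals take their suffix after a '#', e.g. 0xbeef#BI.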
local small_suff = S('lL')
local med_suff = S('bB') * S('iI')
local large_suff = S('dD') + S('fF') + S('bB') * S('dD')
local exp = S('eE') * lexer.digit^1

local dec_inf = ('_' * lexer.digit^1)^0
local hex_inf = ('_' * lexer.xdigit^1)^0
local float_pref = lexer.digit^1 * '.' * lexer.digit^1
local float_suff = exp^-1 * med_suff^-1 * large_suff^-1

local dec = lexer.digit * dec_inf * (small_suff^-1 + float_suff)
local hex = lexer.hex_num * hex_inf * P('#' * (small_suff + med_suff))^-1
local float = float_pref * dec_inf * float_suff

lex:add_rule('number', token(lexer.NUMBER, float + hex + dec))

-- Annotations.
lex:add_rule('annotation', token(lexer.ANNOTATION, '@' * lexer.word))

-- Operators.
lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}#')))

-- Error.
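-- Flag any character not matched by the rules above as an error.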
lex:add_rule('error', token(lexer.ERROR, lexer.any))

-- Fold points.
lex:add_fold_point(lexer.OPERATOR, '{', '}')
lex:add_fold_point(lexer.COMMENT, '/*', '*/')

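-- Line comment prefix used by the editor for comment toggling.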
lexer.property['scintillua.comment'] = '//'

return lex