File: lexer_test.go (golang-github-alecthomas-chroma 0.10.0-5)

package chroma

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

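// TestTokenTypeClassifiers checks category and sub-category membership of
// token types, and that a token type renders its own name as a string.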
func TestTokenTypeClassifiers(t *testing.T) {
	assert.True(t, GenericDeleted.InCategory(Generic))
	assert.True(t, LiteralStringBacktick.InSubCategory(String))
	assert.Equal(t, LiteralStringBacktick.String(), "LiteralStringBacktick")
}

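// TestSimpleLexer defines a minimal INI lexer with NewLexer and verifies
// that Tokenise splits a small INI document into the expected token stream.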
func TestSimpleLexer(t *testing.T) {
	lexer, err := NewLexer( // nolint: forbidigo
		&Config{
			Name:      "INI",
			Aliases:   []string{"ini", "cfg"},
			Filenames: []string{"*.ini", "*.cfg"},
		},
		map[string][]Rule{
			"root": {
				// Runs of whitespace, including newlines.
				{`\s+`, Whitespace, nil},
				// Line comments introduced by ";".
				{`;.*?$`, Comment, nil},
				// "[section]" headers.
				{`\[.*?\]$`, Keyword, nil},
				// "key = value" lines; ByGroups assigns one token type per capture group.
				{`(.*?)(\s*)(=)(\s*)(.*?)$`, ByGroups(Name, Whitespace, Operator, Whitespace, String), nil},
			},
		},
	)
	assert.NoError(t, err)
	actual, err := Tokenise(lexer, nil, `
	; this is a comment
	[section]
	a = 10
`)
	assert.NoError(t, err)
	expected := []Token{
		{Whitespace, "\n\t"},
		{Comment, "; this is a comment"},
		{Whitespace, "\n\t"},
		{Keyword, "[section]"},
		{Whitespace, "\n\t"},
		{Name, "a"},
		{Whitespace, " "},
		{Operator, "="},
		{Whitespace, " "},
		{LiteralString, "10"},
		{Whitespace, "\n"},
	}
	assert.Equal(t, expected, actual)
}
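
A quick way to run just these two tests from the package directory, using
standard go tooling (nothing repo-specific is assumed here):

	go test -run 'TestTokenTypeClassifiers|TestSimpleLexer' .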