package lexer

import (
	"strings"
	"testing"
	"text/scanner"

	"github.com/stretchr/testify/require"
)
func TestLexer(t *testing.T) {
	lexer, err := Upgrade(LexString("hello world"))
	require.NoError(t, err)

	helloPos := Position{Offset: 0, Line: 1, Column: 1}
	worldPos := Position{Offset: 6, Line: 1, Column: 7}
	eofPos := Position{Offset: 11, Line: 1, Column: 12}

	// Peeking is non-destructive: peeking the same offset twice returns the
	// same token, and Next then consumes it.
	require.Equal(t, Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, mustPeek(t, lexer, 0))
	require.Equal(t, Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, mustPeek(t, lexer, 0))
	require.Equal(t, Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, mustNext(t, lexer))
	require.Equal(t, Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, mustPeek(t, lexer, 0))
	require.Equal(t, Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, mustNext(t, lexer))
	require.Equal(t, Token{Type: scanner.EOF, Value: "", Pos: eofPos}, mustPeek(t, lexer, 0))
	require.Equal(t, Token{Type: scanner.EOF, Value: "", Pos: eofPos}, mustNext(t, lexer))
}
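
// mustPeek and mustNext are helpers that TestLexer relies on but that are not
// part of this excerpt. The definitions below are a minimal sketch; they assume
// the lexer returned by Upgrade exposes Peek(n int) (Token, error) alongside
// the usual Next() (Token, error). Adjust or drop them if the real test suite
// already defines these helpers elsewhere.

// peekingLexer captures just the two methods the helpers need.
type peekingLexer interface {
	Peek(n int) (Token, error)
	Next() (Token, error)
}

func mustPeek(t *testing.T, lexer peekingLexer, n int) Token {
	t.Helper()
	token, err := lexer.Peek(n)
	require.NoError(t, err)
	return token
}

func mustNext(t *testing.T, lexer peekingLexer) Token {
	t.Helper()
	token, err := lexer.Next()
	require.NoError(t, err)
	return token
}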

func TestLexString(t *testing.T) {
	// The input is a backtick-quoted Go literal, so the lexer sees the two
	// characters `\n` and is expected to decode them into a real newline.
	lexer := LexString(`"hello\nworld"`)
	token, err := lexer.Next()
	require.NoError(t, err)
	require.Equal(t, Token{Type: scanner.String, Value: "hello\nworld", Pos: Position{Line: 1, Column: 1}}, token)
}

func TestLexSingleString(t *testing.T) {
	// Single-quoted strings are lexed like double-quoted ones, with escape
	// sequences decoded.
	lexer := LexString(`'hello\nworld'`)
	token, err := lexer.Next()
	require.NoError(t, err)
	require.Equal(t, Token{Type: scanner.String, Value: "hello\nworld", Pos: Position{Line: 1, Column: 1}}, token)

	// A single-quoted literal containing a single rune is reported as a Char token.
	lexer = LexString(`'\U00008a9e'`)
	token, err = lexer.Next()
	require.NoError(t, err)
	require.Equal(t, Token{Type: scanner.Char, Value: "\U00008a9e", Pos: Position{Line: 1, Column: 1}}, token)
}

func BenchmarkTextScannerLexer(b *testing.B) {
	input := strings.Repeat("hello world 123 hello world 123", 100)
	r := strings.NewReader(input)
	b.ReportMetric(float64(len(input)), "B")
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		lex, _ := TextScannerLexer.Lex(r)
		// Drain the lexer until EOF.
		for {
			token, _ := lex.Next()
			if token.Type == EOF {
				break
			}
		}
		// Rewind the reader so the next iteration lexes the same input again.
		_, _ = r.Seek(0, 0)
	}
}