package css_lexer

import (
	"strings"
	"testing"

	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/test"
)
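
// lexToken tokenizes the input and returns the kind and decoded text of the
// first token, or TEndOfFile if no tokens were produced.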
func lexToken(contents string) (T, string) {
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, nil)
	result := Tokenize(log, test.SourceForTest(contents), Options{})
	if len(result.Tokens) > 0 {
		t := result.Tokens[0]
		return t.Kind, t.DecodedText(contents)
	}
	return TEndOfFile, ""
}
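
// lexerError tokenizes the input and returns the concatenated text of every
// message the lexer logged (errors, warnings, and notes).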
func lexerError(contents string) string {
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, nil)
	Tokenize(log, test.SourceForTest(contents), Options{})
	var text strings.Builder
	for _, msg := range log.Done() {
		text.WriteString(msg.String(logger.OutputOptions{}, logger.TerminalInfo{}))
	}
	return text.String()
}
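
// TestTokens checks that each input lexes to the expected token kind; the
// "text" field documents a human-readable name for each kind.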
func TestTokens(t *testing.T) {
	expected := []struct {
		contents string
		text     string
		token    T
	}{
		{"", "end of file", TEndOfFile},
		{"@media", "@-keyword", TAtKeyword},
		{"url(x y", "bad URL token", TBadURL},
		{"-->", "\"-->\"", TCDC},
		{"<!--", "\"<!--\"", TCDO},
		{"}", "\"}\"", TCloseBrace},
		{"]", "\"]\"", TCloseBracket},
		{")", "\")\"", TCloseParen},
		{":", "\":\"", TColon},
		{",", "\",\"", TComma},
		{"?", "delimiter", TDelim},
		{"&", "\"&\"", TDelimAmpersand},
		{"*", "\"*\"", TDelimAsterisk},
		{"|", "\"|\"", TDelimBar},
		{"^", "\"^\"", TDelimCaret},
		{"$", "\"$\"", TDelimDollar},
		{".", "\".\"", TDelimDot},
		{"=", "\"=\"", TDelimEquals},
		{"!", "\"!\"", TDelimExclamation},
		{">", "\">\"", TDelimGreaterThan},
		{"+", "\"+\"", TDelimPlus},
		{"/", "\"/\"", TDelimSlash},
		{"~", "\"~\"", TDelimTilde},
		{"1px", "dimension", TDimension},
		{"max(", "function token", TFunction},
		{"#name", "hash token", THash},
		{"name", "identifier", TIdent},
		{"123", "number", TNumber},
		{"{", "\"{\"", TOpenBrace},
		{"[", "\"[\"", TOpenBracket},
		{"(", "\"(\"", TOpenParen},
		{"50%", "percentage", TPercentage},
		{";", "\";\"", TSemicolon},
		{"'abc'", "string token", TString},
		{"url(test)", "URL token", TURL},
		{" ", "whitespace", TWhitespace},
	}

	for _, it := range expected {
		contents := it.contents
		token := it.token
		t.Run(contents, func(t *testing.T) {
			kind, _ := lexToken(contents)
			test.AssertEqual(t, kind, token)
		})
	}
}
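
// TestStringParsing checks how the lexer decodes escape sequences inside
// string tokens.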
func TestStringParsing(t *testing.T) {
	contentsOfStringToken := func(contents string) string {
		t.Helper()
		kind, text := lexToken(contents)
		test.AssertEqual(t, kind, TString)
		return text
	}
test.AssertEqual(t, contentsOfStringToken("\"foo\""), "foo")
test.AssertEqual(t, contentsOfStringToken("\"f\\oo\""), "foo")
test.AssertEqual(t, contentsOfStringToken("\"f\\\"o\""), "f\"o")
test.AssertEqual(t, contentsOfStringToken("\"f\\\\o\""), "f\\o")
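	// A backslash before a newline (LF, CR, CRLF, or FF) is a line
	// continuation: both characters are dropped from the decoded text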
	test.AssertEqual(t, contentsOfStringToken("\"f\\\no\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\ro\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\r\no\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\fo\""), "fo")
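	// A hex escape decodes to the matching code point; one whitespace
	// character after the escape is consumed as its terminator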
	test.AssertEqual(t, contentsOfStringToken("\"f\\6fo\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\6f o\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\6f  o\""), "fo o")
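	// An escape beyond the maximum Unicode code point decodes to U+FFFD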
	test.AssertEqual(t, contentsOfStringToken("\"f\\fffffffo\""), "f\uFFFDfo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\10abcdeo\""), "f\U0010ABCDeo")
}
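
// TestURLParsing checks how the lexer decodes unquoted url() tokens.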
func TestURLParsing(t *testing.T) {
	contentsOfURLToken := func(expected T, contents string) string {
		t.Helper()
		kind, text := lexToken(contents)
		test.AssertEqual(t, kind, expected)
		return text
	}
test.AssertEqual(t, contentsOfURLToken(TURL, "url(foo)"), "foo")
test.AssertEqual(t, contentsOfURLToken(TURL, "url( foo\t\t)"), "foo")
test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\oo)"), "foo")
test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\\"o)"), "f\"o")
test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\'o)"), "f'o")
test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\)o)"), "f)o")
test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\6fo)"), "foo")
test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\6f o)"), "foo")
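	// Whitespace that isn't consumed by an escape makes this a bad URL
	// token, whose decoded text is the raw source text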
	test.AssertEqual(t, contentsOfURLToken(TBadURL, "url(f\\6f  o)"), "url(f\\6f  o)")
}
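
// TestComment checks the diagnostics for unterminated and single-line comments.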
func TestComment(t *testing.T) {
	test.AssertEqualWithDiff(t, lexerError("/*"), "<stdin>: ERROR: Expected \"*/\" to terminate multi-line comment\n<stdin>: NOTE: The multi-line comment starts here:\n")
	test.AssertEqualWithDiff(t, lexerError("/*/"), "<stdin>: ERROR: Expected \"*/\" to terminate multi-line comment\n<stdin>: NOTE: The multi-line comment starts here:\n")
	test.AssertEqualWithDiff(t, lexerError("/**/"), "")
	test.AssertEqualWithDiff(t, lexerError("//"), "<stdin>: WARNING: Comments in CSS use \"/* ... */\" instead of \"//\"\n")
}
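
// TestString checks the diagnostics for unterminated string tokens.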
func TestString(t *testing.T) {
	test.AssertEqualWithDiff(t, lexerError("'"), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("\""), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("'\\'"), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("\"\\\""), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("''"), "")
	test.AssertEqualWithDiff(t, lexerError("\"\""), "")
}

func TestBOM(t *testing.T) {
	// A byte order mark should not be parsed as an identifier
	kind, _ := lexToken("\uFEFF.")
	test.AssertEqual(t, kind, TDelimDot)
}