File: serialize-consecutive-tokens.html

<!doctype html>
<title>Serialization of consecutive tokens.</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>

<meta name="author" content="Tab Atkins-Bittner">
<link rel=help href="https://drafts.csswg.org/css-syntax/#serialization">
<body>
<!--
    The serialization chapter provides a table listing all the combinations of consecutive tokens that will,
    if naively serialized next to each other,
    produce a different set of tokens when re-parsed.
    The spec requires that a comment be inserted between such tokens in the serialization,
    to ensure that they round-trip correctly.
-->
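<!--
    For example, the ident tokens "foo" and "bar" would naively serialize as "foobar",
    which re-parses as a single ident token; serializing them as "foo/**/bar" keeps the
    two tokens distinct across a round trip. ("/**/" is the typical separator, but the
    tests below only assert that the result is not the bare concatenation.)
-->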

<script>
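// Each test stores t1 and t2 in the custom properties --t1 and --t2, concatenates them
// via var() substitution into --result, and reads the computed value back. It then
// asserts that the result still starts with t1, still ends with t2, and is not the bare
// concatenation t1+t2, i.e. that the serializer inserted something (per the spec, a
// comment) between the two tokens.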

function testTokenPairs(t1, t2) {
    const b = document.body;
    test(()=>{
        b.style.setProperty("--t1", t1);
        b.style.setProperty("--t2", t2);
        b.style.setProperty("--result", "var(--t1)var(--t2)");
        const result = getComputedStyle(b).getPropertyValue("--result");
        assert_equals(result.slice(0, t1.length), t1, `Result must start with ${t1}`);
        assert_equals(result.slice(-t2.length), t2, `Result must end with ${t2}`);
        assert_not_equals(result, t1+t2, `Result must have a comment between ${t1} and ${t2}`);
    }, `Serialization of consecutive ${t1} and ${t2} tokens.`);
}
testTokenPairs("foo", "bar");
testTokenPairs("foo", "bar()");
testTokenPairs("foo", "url(bar)");
testTokenPairs("foo", "-");
testTokenPairs("foo", "123");
testTokenPairs("foo", "123%");
testTokenPairs("foo", "123em");
testTokenPairs("foo", "-->");
testTokenPairs("foo", "()");

testTokenPairs("@foo", "bar");
testTokenPairs("@foo", "bar()");
testTokenPairs("@foo", "url(bar)");
testTokenPairs("@foo", "-");
testTokenPairs("@foo", "123");
testTokenPairs("@foo", "123%");
testTokenPairs("@foo", "123em");
testTokenPairs("@foo", "-->");

testTokenPairs("#foo", "bar");
testTokenPairs("#foo", "bar()");
testTokenPairs("#foo", "url(bar)");
testTokenPairs("#foo", "-");
testTokenPairs("#foo", "123");
testTokenPairs("#foo", "123%");
testTokenPairs("#foo", "123em");
testTokenPairs("#foo", "-->");

testTokenPairs("123foo", "bar");
testTokenPairs("123foo", "bar()");
testTokenPairs("123foo", "url(bar)");
testTokenPairs("123foo", "-");
testTokenPairs("123foo", "123");
testTokenPairs("123foo", "123%");
testTokenPairs("123foo", "123em");
testTokenPairs("123foo", "-->");

testTokenPairs("#", "bar");
testTokenPairs("#", "bar()");
testTokenPairs("#", "url(bar)");
testTokenPairs("#", "-");
testTokenPairs("#", "123");
testTokenPairs("#", "123%");
testTokenPairs("#", "123em");

testTokenPairs("-", "bar");
testTokenPairs("-", "bar()");
testTokenPairs("-", "url(bar)");
testTokenPairs("-", "-");
testTokenPairs("-", "123");
testTokenPairs("-", "123%");
testTokenPairs("-", "123em");

testTokenPairs("123", "bar");
testTokenPairs("123", "bar()");
testTokenPairs("123", "url(bar)");
testTokenPairs("123", "123");
testTokenPairs("123", "123%");
testTokenPairs("123", "123em");
testTokenPairs("123", "%");

testTokenPairs("@", "bar");
testTokenPairs("@", "bar()");
testTokenPairs("@", "url(bar)");
testTokenPairs("@", "-");

testTokenPairs(".", "123");
testTokenPairs(".", "123%");
testTokenPairs(".", "123em");

testTokenPairs("+", "123");
testTokenPairs("+", "123%");
testTokenPairs("+", "123em");

testTokenPairs("/", "*");

// Test that interior comments are preserved, but exterior ones are not.
function testComments(text, t1, expected) {
    const b = document.body;
    test(()=>{
        b.style.setProperty("--t1", t1);
        b.style.setProperty("--result", text);
        const result = getComputedStyle(b).getPropertyValue("--result");
        assert_equals(result, expected);
    }, `Comments are handled correctly when computing ${text} using t1:${t1}.`);
}
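// The expectations below describe the behaviour these tests rely on: a comment written
// literally in a value that needs no substitution survives verbatim, while comments
// adjacent to a var() reference, or inside the substituted value, come back as the
// empty comment "/**/".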
testComments("a/* comment */b", "", "a/* comment */b");
testComments("a/* comment */var(--t1)", "b", "a/**/b");
testComments("var(--t1)b", "a/* comment */", "a/**/b");

// Test comments in and around quoted strings: a "/*" inside a string token is not a
// comment and must be preserved verbatim, while a real comment after the string is dropped.
testComments("var(--t1)b", "'a/* unfinished '", "'a/* unfinished 'b");
testComments("var(--t1)b", "\"a/* unfinished \"", "\"a/* unfinished \"b");
testComments("var(--t1)b", "'a \" '/* comment */", "'a \" 'b");

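// var(--does-not-exist,) has an empty fallback and substitutes to nothing, so the "a"
// and "b" tokens still end up adjacent and must still get a comment between them.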
test(()=>{
    const b = document.body;
    b.style.setProperty("--t1", "a");
    b.style.setProperty("--t2", "b");
    b.style.setProperty("--result", "var(--t1)var(--does-not-exist,)var(--t2)");
    const result = getComputedStyle(b).getPropertyValue("--result");
    assert_equals(result[0], "a", `Result must start with a`);
    assert_equals(result[result.length - 1], "b", `Result must end with b`);
    assert_not_equals(result, "ab", `Result must have a comment between a and b`);
}, 'Empty fallback between tokens must not disturb comment insertion');

</script>