File: dictionary-compressed.tentative.https.html

package info (click to toggle)
firefox 144.0-1
  • links: PTS, VCS
  • area: main
  • in suites: sid
  • size: 4,637,504 kB
  • sloc: cpp: 7,576,692; javascript: 6,430,831; ansic: 3,748,119; python: 1,398,978; xml: 628,810; asm: 438,679; java: 186,194; sh: 63,212; makefile: 19,159; objc: 13,086; perl: 12,986; yacc: 4,583; cs: 3,846; pascal: 3,448; lex: 1,720; ruby: 1,003; exp: 762; php: 436; lisp: 258; awk: 247; sql: 66; sed: 53; csh: 10
file content (105 lines) | stat: -rw-r--r-- 5,192 bytes parent folder | download | duplicates (5)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
<!DOCTYPE html>
<head>
<meta charset="utf-8">
<meta name="timeout" content="long">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/get-host-info.sub.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>

// This is a set of tests for the dictionary itself being compressed, both by
// non-dictionary content encodings and dictionary encodings. The encoding used
// for the dictionary itself is independent of the encoding used for the data
// so the test uses different encodings just to make sure that the dictionaries
// don't carry any encoding-specific dependencies.

compression_dictionary_promise_test(async (t) => {
  // Register a dictionary whose own response body is gzip-encoded; the
  // decoded payload must still match the expected dictionary content.
  const dictionaryUrl =
    `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=gzip`;
  const dict = await (await fetch(dictionaryUrl)).text();
  assert_equals(dict, kDefaultDictionaryContent);
  // Wait until the browser starts advertising the dictionary via the
  // Available-Dictionary request header.
  const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
  assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);

  // Check if the data compressed using the dictionary can be decompressed.
  const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
  const data = await (await fetch(data_url)).text();
  assert_equals(data, kExpectedCompressedData);
}, 'Decompression using gzip-encoded dictionary works as expected');

compression_dictionary_promise_test(async (t) => {
  // Register a dictionary whose own response body is Brotli-encoded; the
  // decoded payload must still match the expected dictionary content.
  const dictionaryUrl =
    `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=br`;
  const dict = await (await fetch(dictionaryUrl)).text();
  assert_equals(dict, kDefaultDictionaryContent);
  // Wait until the browser starts advertising the dictionary via the
  // Available-Dictionary request header.
  const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
  assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);

  // Check if the data compressed using the dictionary can be decompressed.
  const data_url = `${kCompressedDataPath}?content_encoding=dcz`;
  const data = await (await fetch(data_url)).text();
  assert_equals(data, kExpectedCompressedData);
}, 'Decompression using Brotli-encoded dictionary works as expected');

compression_dictionary_promise_test(async (t) => {
  // Register a dictionary whose own response body is Zstandard-encoded; the
  // decoded payload must still match the expected dictionary content.
  const dictionaryUrl =
    `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=zstd`;
  const dict = await (await fetch(dictionaryUrl)).text();
  assert_equals(dict, kDefaultDictionaryContent);
  // Wait until the browser starts advertising the dictionary via the
  // Available-Dictionary request header.
  const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
  assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);

  // Check if the data compressed using Brotli with the dictionary can be
  // decompressed (Zstandard decompression of the data is tested separately).
  const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
  const data = await (await fetch(data_url)).text();
  assert_equals(data, kExpectedCompressedData);
}, 'Decompression using Zstandard-encoded dictionary works as expected');

compression_dictionary_promise_test(async (t) => {
  // Register the initial dictionary under id "id1" and wait until the
  // browser advertises it via the Available-Dictionary request header.
  const firstDictionaryUrl =
      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?id=id1`;
  const firstResponse = await fetch(firstDictionaryUrl);
  assert_equals(await firstResponse.text(), kDefaultDictionaryContent);
  assert_equals(
      await waitUntilAvailableDictionaryHeader(t, {}),
      kDefaultDictionaryHashBase64);

  // Register another dictionary, compressed with dcb using the first dictionary.
  const secondDictionaryUrl =
      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=dcb&id=id2`;
  const secondResponse = await fetch(secondDictionaryUrl);
  assert_equals(await secondResponse.text(), kDefaultDictionaryContent);
  await waitUntilHeader(t, "dictionary-id", {expected_header: '"id2"'});

  // Check if the data compressed using dcz with the updated dictionary works.
  const dataUrl =
      `${SAME_ORIGIN_RESOURCES_URL}/compressed-data.py?content_encoding=dcz`;
  const dataResponse = await fetch(dataUrl);
  assert_equals(await dataResponse.text(), kExpectedCompressedData);
}, 'A dcb dictionary-compressed dictionary can be used as a dictionary for future requests.');

compression_dictionary_promise_test(async (t) => {
  // Register the initial dictionary under id "id1" and wait until the
  // browser advertises it via the Available-Dictionary request header.
  const firstDictionaryUrl =
      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?id=id1`;
  const firstResponse = await fetch(firstDictionaryUrl);
  assert_equals(await firstResponse.text(), kDefaultDictionaryContent);
  assert_equals(
      await waitUntilAvailableDictionaryHeader(t, {}),
      kDefaultDictionaryHashBase64);

  // Register another dictionary, compressed with dcz using the first dictionary.
  const secondDictionaryUrl =
      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=dcz&id=id2`;
  const secondResponse = await fetch(secondDictionaryUrl);
  assert_equals(await secondResponse.text(), kDefaultDictionaryContent);
  await waitUntilHeader(t, "dictionary-id", {expected_header: '"id2"'});

  // Check if the data compressed using dcb with the updated dictionary works.
  const dataUrl =
      `${SAME_ORIGIN_RESOURCES_URL}/compressed-data.py?content_encoding=dcb`;
  const dataResponse = await fetch(dataUrl);
  assert_equals(await dataResponse.text(), kExpectedCompressedData);
}, 'A dcz dictionary-compressed dictionary can be used as a dictionary for future requests.');

</script>
</body>