File: lexer_unit_test.rb

package info (click to toggle)
ruby-liquid 5.12.0-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 1,444 kB
  • sloc: ruby: 14,571; makefile: 6
file content (149 lines) | stat: -rw-r--r-- 3,599 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
# frozen_string_literal: true

require 'test_helper'

# Unit tests for Liquid::Lexer: verifies the token stream produced for
# string literals, numbers, comparison operators, punctuation, identifiers,
# whitespace handling, and the SyntaxError raised for malformed input.
class LexerUnitTest < Minitest::Test
  include Liquid

  def test_strings
    expected = [
      [:string, %('this is a test""')],
      [:string, %("wat 'lol'")],
      [:end_of_string],
    ]
    assert_equal(expected, lex(%( 'this is a test""' "wat 'lol'")))
  end

  def test_integer
    assert_equal([[:id, 'hi'], [:number, '50'], [:end_of_string]], lex('hi 50'))
  end

  def test_float
    assert_equal([[:id, 'hi'], [:number, '5.0'], [:end_of_string]], lex('hi 5.0'))
  end

  def test_comparison
    expected = [
      [:comparison, '=='],
      [:comparison, '<>'],
      [:comparison, 'contains'],
      [:end_of_string],
    ]
    assert_equal(expected, lex('== <> contains '))
  end

  def test_comparison_without_whitespace
    assert_equal(
      [[:number, '1'], [:comparison, '>'], [:number, '0'], [:end_of_string]],
      lex('1>0'),
    )
  end

  def test_comparison_with_negative_number
    assert_equal(
      [[:number, '1'], [:comparison, '>'], [:number, '-1'], [:end_of_string]],
      lex('1>-1'),
    )
  end

  def test_raise_for_invalid_comparison
    # Each of these contains an operator sequence the lexer must reject.
    ['1>!1', '1=<1', '1!!1'].each do |markup|
      assert_raises(SyntaxError) { lex(markup) }
    end
  end

  def test_specials
    assert_equal(
      [[:pipe, '|'], [:dot, '.'], [:colon, ':'], [:end_of_string]],
      lex('| .:'),
    )

    assert_equal(
      [[:open_square, '['], [:comma, ','], [:close_square, ']'], [:end_of_string]],
      lex('[,]'),
    )
  end

  def test_fancy_identifiers
    # Identifiers may end in '?'.
    assert_equal([[:id, 'hi'], [:id, 'five?'], [:end_of_string]], lex('hi five?'))

    # A leading digit run is lexed as a number, the rest as an identifier.
    assert_equal([[:number, '2'], [:id, 'foo'], [:end_of_string]], lex('2foo'))
  end

  def test_whitespace
    # Newlines and tabs between tokens are skipped.
    assert_equal(
      [[:id, 'five'], [:pipe, '|'], [:comparison, '=='], [:end_of_string]],
      lex("five|\n\t =="),
    )
  end

  def test_unexpected_character
    assert_raises(SyntaxError) { lex("%") }
  end

  def test_negative_numbers
    expected = [
      [:id, 'foo'],
      [:pipe, '|'],
      [:id, 'default'],
      [:colon, ":"],
      [:number, '-1'],
      [:end_of_string],
    ]
    assert_equal(expected, lex("foo | default: -1"))
  end

  def test_greater_than_two_digits
    assert_equal(
      [[:id, 'foo'], [:comparison, '>'], [:number, '12'], [:end_of_string]],
      lex("foo > 12"),
    )
  end

  def test_error_with_utf8_character
    error = assert_raises(SyntaxError) { lex("1 < 1Ø") }

    assert_equal('Liquid syntax error: Unexpected character Ø', error.message)
  end

  def test_contains_as_attribute_name
    # 'contains' is only a comparison operator between expressions;
    # after a dot it is an ordinary attribute identifier.
    assert_equal(
      [[:id, "a"], [:dot, "."], [:id, "contains"], [:dot, "."], [:id, "b"], [:end_of_string]],
      lex("a.contains.b"),
    )
  end

  def test_tokenize_incomplete_expression
    # Trailing operators/operands still lex cleanly; parsing errors are
    # the parser's job, not the lexer's.
    assert_equal([[:id, "false"], [:dash, "-"], [:end_of_string]], lex("false -"))
    assert_equal([[:id, "false"], [:comparison, "<"], [:end_of_string]], lex("false <"))
    assert_equal([[:id, "false"], [:comparison, ">"], [:end_of_string]], lex("false >"))
    assert_equal([[:id, "false"], [:number, "1"], [:end_of_string]], lex("false 1"))
  end

  def test_error_with_invalid_utf8
    error = assert_raises(SyntaxError) { lex("\x00\xff") }

    assert_equal('Liquid syntax error: Invalid byte sequence in UTF-8', error.message)
  end

  private

  # Runs the lexer over +input+ and returns the raw token array.
  def lex(input)
    Lexer.tokenize(StringScanner.new(input))
  end
end