File: lexer_unit_test.rb

Package: ruby-liquid 5.4.0-4
# frozen_string_literal: true

require 'test_helper'

class LexerUnitTest < Minitest::Test
  include Liquid

  def test_strings
    tokens = Lexer.new(%( 'this is a test""' "wat 'lol'")).tokenize
    assert_equal([[:string, %('this is a test""')], [:string, %("wat 'lol'")], [:end_of_string]], tokens)
  end

  def test_integer
    tokens = Lexer.new('hi 50').tokenize
    assert_equal([[:id, 'hi'], [:number, '50'], [:end_of_string]], tokens)
  end

  def test_float
    tokens = Lexer.new('hi 5.0').tokenize
    assert_equal([[:id, 'hi'], [:number, '5.0'], [:end_of_string]], tokens)
  end

  def test_comparison
    tokens = Lexer.new('== <> contains ').tokenize
    assert_equal([[:comparison, '=='], [:comparison, '<>'], [:comparison, 'contains'], [:end_of_string]], tokens)
  end

  def test_specials
    tokens = Lexer.new('| .:').tokenize
    assert_equal([[:pipe, '|'], [:dot, '.'], [:colon, ':'], [:end_of_string]], tokens)
    tokens = Lexer.new('[,]').tokenize
    assert_equal([[:open_square, '['], [:comma, ','], [:close_square, ']'], [:end_of_string]], tokens)
  end

  def test_fancy_identifiers
    tokens = Lexer.new('hi five?').tokenize
    assert_equal([[:id, 'hi'], [:id, 'five?'], [:end_of_string]], tokens)

    tokens = Lexer.new('2foo').tokenize
    assert_equal([[:number, '2'], [:id, 'foo'], [:end_of_string]], tokens)
  end

  def test_whitespace
    tokens = Lexer.new("five|\n\t ==").tokenize
    assert_equal([[:id, 'five'], [:pipe, '|'], [:comparison, '=='], [:end_of_string]], tokens)
  end

  def test_unexpected_character
    assert_raises(SyntaxError) do
      Lexer.new("%").tokenize
    end
  end
end
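
A minimal usage sketch of the lexer exercised above, assembled from the token shapes the assertions expect. The expression string and the expected-output comment are illustrative examples, not taken from the file; outside the test class the lexer is reached as Liquid::Lexer.

    require 'liquid'

    # Tokenize a filter expression the same way the tests above do.
    tokens = Liquid::Lexer.new("user.name | default: 'guest'").tokenize

    # Expected shape, per the assertions above (string tokens keep their quotes):
    # [[:id, 'user'], [:dot, '.'], [:id, 'name'], [:pipe, '|'],
    #  [:id, 'default'], [:colon, ':'], [:string, "'guest'"], [:end_of_string]]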