# frozen_string_literal: true

require 'test_helper'
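
# Unit tests for the Liquid tokenizer: they verify how template source is
# split into raw text, {{ variable }}, and {% tag %} tokens, and that line
# numbers are tracked per token.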
class TokenizerTest < Minitest::Test
  def test_tokenize_strings
    assert_equal([' '], tokenize(' '))
    assert_equal(['hello world'], tokenize('hello world'))
  end

  def test_tokenize_variables
    assert_equal(['{{funk}}'], tokenize('{{funk}}'))
    assert_equal([' ', '{{funk}}', ' '], tokenize(' {{funk}} '))
    assert_equal([' ', '{{funk}}', ' ', '{{so}}', ' ', '{{brother}}', ' '], tokenize(' {{funk}} {{so}} {{brother}} '))
    assert_equal([' ', '{{ funk }}', ' '], tokenize(' {{ funk }} '))
  end

  def test_tokenize_blocks
    assert_equal(['{%comment%}'], tokenize('{%comment%}'))
    assert_equal([' ', '{%comment%}', ' '], tokenize(' {%comment%} '))
    assert_equal([' ', '{%comment%}', ' ', '{%endcomment%}', ' '], tokenize(' {%comment%} {%endcomment%} '))
    assert_equal([' ', '{% comment %}', ' ', '{% endcomment %}', ' '], tokenize(' {% comment %} {% endcomment %} '))
  end
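
  # Each token is reported at the line on which it starts; newlines inside a
  # token advance the count, so in the last case below the trailing ' ' token
  # starts on line 3.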
  def test_calculate_line_numbers_per_token_with_profiling
    assert_equal([1], tokenize_line_numbers("{{funk}}"))
    assert_equal([1, 1, 1], tokenize_line_numbers(" {{funk}} "))
    assert_equal([1, 2, 2], tokenize_line_numbers("\n{{funk}}\n"))
    assert_equal([1, 1, 3], tokenize_line_numbers(" {{\n funk \n}} "))
  end

  private

  def new_tokenizer(source, parse_context: Liquid::ParseContext.new, start_line_number: nil)
    parse_context.new_tokenizer(source, start_line_number: start_line_number)
  end
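
  # Drains the tokenizer by repeatedly shifting and collects every token.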
  def tokenize(source)
    tokenizer = new_tokenizer(source)
    tokens = []
    # shift is private in Liquid::C::Tokenizer, since it is only for unit testing
    while (t = tokenizer.send(:shift))
      tokens << t
    end
    tokens
  end
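
  # Returns the line number at which each token starts. The line number is
  # read before shifting, so it reflects where the upcoming token begins.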
  def tokenize_line_numbers(source)
    tokenizer = new_tokenizer(source, start_line_number: 1)
    line_numbers = []
    loop do
      line_number = tokenizer.line_number
      break unless tokenizer.send(:shift)
      line_numbers << line_number
    end
    line_numbers
  end
end