File: lexing.rb (ruby-rouge 4.7.0-1)

# -*- coding: utf-8 -*- #
# frozen_string_literal: true

module Support
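  # Assertion helpers for lexer specs: lex a piece of text and make
  # assertions about the token stream it produces. Unless a lexer is passed
  # explicitly, the helpers fall back to the including spec's `subject`.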
  module Lexing
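    # Lex +text+ with +lexer+ (defaulting to the spec's +subject+) and return
    # only the [token, value] pairs whose token matches +target_token+.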
    def filter_by_token(target_token, text, lexer=nil)
      lexer ||= subject

      tokens = lexer.lex(text)

      tokens.select do |(tok, _)|
        same_token?(tok, target_token)
      end
    end

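    # A lexed +token+ matches +target+ if Rouge::Token[target] appears
    # anywhere in its token chain, so subtypes of the target also count;
    # values without a token chain fall back to plain equality.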
    def same_token?(token, target)
      if token.respond_to? :token_chain
        token.token_chain.include?(Rouge::Token[target])
      else
        token == target
      end
    end

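    # Assert that lexing +text+ yields no tokens matching +tokname+.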
    def deny_has_token(tokname, text, lexer=nil)
      refute { filter_by_token(tokname, text, lexer).any? }
    end

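    # Assert that lexing +text+ yields at least one token matching +tokname+.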
    def assert_has_token(tokname, text, lexer=nil)
      assert { filter_by_token(tokname, text, lexer).any? }
    end

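    # Assert that lexing produces no Error tokens; arguments are forwarded
    # to #deny_has_token.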
    def assert_no_errors(*a)
      deny_has_token('Error', *a)
    end

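    # Assert that the full token stream for +text+ equals +expected+, given
    # as [qualified token name, value] pairs. A Rouge::Lexer may be passed as
    # the first argument after +text+ to override the spec's +subject+.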
    def assert_tokens_equal(text, *expected)
      if expected.first.is_a? Rouge::Lexer
        lexer = expected.shift
      else
        lexer = subject
      end

      actual = lexer.lex(text).map { |token, value| [ token.qualname, value ] }
      assert { expected == actual }
    end

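    # Assert that the token stream for +text+ includes every expected
    # [qualified token name, value] pair. As above, a leading Rouge::Lexer
    # overrides the spec's +subject+.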
    def assert_tokens_includes(text, *expected)
      if expected.first.is_a? Rouge::Lexer
        lexer = expected.shift
      else
        lexer = subject
      end

      actual = lexer.lex(text).map { |token, value| [ token.qualname, value ] }
      expected.all? { |e| assert_includes actual, e  }
    end
  end
end
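
# Hypothetical usage sketch, assuming a minitest/spec setup where `subject`
# is the lexer under test and Rouge::Lexers::Ruby tags `def` as a Keyword
# token:
#
#   describe Rouge::Lexers::Ruby do
#     include Support::Lexing
#
#     subject { Rouge::Lexers::Ruby.new }
#
#     it 'lexes a method definition cleanly' do
#       assert_has_token 'Keyword', 'def foo; end'
#       assert_no_errors 'def foo; end'
#       assert_tokens_includes 'def foo; end', ['Keyword', 'def']
#     end
#   end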