File: tokenizer_test_parser.rb

package info (click to toggle)
libfeedtools-ruby 0.2.29%2Bdfsg1-4
  • links: PTS, VCS
  • area: main
  • in suites: squeeze
  • size: 2,004 kB
  • ctags: 1,385
  • sloc: ruby: 18,815; sql: 39; makefile: 6
file content (63 lines) | stat: -rw-r--r-- 1,376 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
require 'html5/constants'

class TokenizerTestParser
  # Adapts a stream of HTML5 tokenizer tokens into the plain-array
  # representation used by the html5lib tokenizer test fixtures,
  # e.g. ["StartTag", name, attributes] or the bare string "ParseError".
  def initialize(tokenizer)
    # +tokenizer+ may be any enumerable yielding token hashes that
    # carry at least a :type key.
    @tokenizer = tokenizer
  end

  # Iterates the tokenizer, dispatching each token to the matching
  # process<Type> handler, and returns the collected output tokens.
  def parse
    @outputTokens = []

    @tokenizer.each do |token|
      # e.g. :StartTag dispatches to processStartTag(token).
      send('process' + token[:type].to_s, token)
    end

    @outputTokens
  end

  def processDoctype(token)
    @outputTokens.push(["DOCTYPE", token[:name], token[:publicId],
      token[:systemId], token[:correct]])
  end

  def processStartTag(token)
    @outputTokens.push(["StartTag", token[:name], token[:data]])
  end

  # A self-closing ("empty") tag is only legal for void elements;
  # anything else is reported as a parse error before being emitted
  # as an ordinary start tag.
  def processEmptyTag(token)
    unless HTML5::VOID_ELEMENTS.include? token[:name]
      @outputTokens.push("ParseError")
    end
    @outputTokens.push(["StartTag", token[:name], token[:data]])
  end

  # End tags may not carry attributes; any present is a parse error,
  # but the end tag itself is still emitted.
  def processEndTag(token)
    processParseError(token) if token[:data].length > 0
    @outputTokens.push(["EndTag", token[:name]])
  end

  def processComment(token)
    @outputTokens.push(["Comment", token[:data]])
  end

  def processCharacters(token)
    @outputTokens.push(["Character", token[:data]])
  end

  # Space characters are reported the same way as ordinary characters.
  alias processSpaceCharacters processCharacters

  # End-of-file produces no output token.
  # NOTE(review): the snake_case name only matches the send-based
  # dispatch above if the tokenizer yields a :_eof token type —
  # confirm against the tokenizer's actual token types.
  def process_eof(token)
  end

  def processParseError(token)
    @outputTokens.push("ParseError")
  end
end