File: test_cfm.py

# -*- coding: utf-8 -*-
"""
    Basic ColdfusionHtmlLexer Test
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: Copyright 2014 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import unittest

from pygments.token import Token
from pygments.lexers import ColdfusionHtmlLexer


class ColdfusionHtmlLexerTest(unittest.TestCase):

    def setUp(self):
        self.lexer = ColdfusionHtmlLexer()

    def testBasicComment(self):
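        # The lexer should split a ColdFusion comment into opener, body, and
        # closer tokens; get_tokens() also yields a trailing newline as Text.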
        fragment = u'<!--- cfcomment --->'
        expected = [
            (Token.Text, u''),
            (Token.Comment.Multiline, u'<!---'),
            (Token.Comment.Multiline, u' cfcomment '),
            (Token.Comment.Multiline, u'--->'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))

    def testNestedComment(self):
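        # A nested comment must not close the outer one early: every delimiter
        # and every body piece is still tokenized as Comment.Multiline.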
        fragment = u'<!--- nested <!--- cfcomment ---> --->'
        expected = [
            (Token.Text, u''),
            (Token.Comment.Multiline, u'<!---'),
            (Token.Comment.Multiline, u' nested '),
            (Token.Comment.Multiline, u'<!---'),
            (Token.Comment.Multiline, u' cfcomment '),
            (Token.Comment.Multiline, u'--->'),
            (Token.Comment.Multiline, u' '),
            (Token.Comment.Multiline, u'--->'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
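

# Standalone entry point: run these tests directly with the standard-library
# runner, e.g. `python test_cfm.py` (the packaged suite may use its own runner).
if __name__ == '__main__':
    unittest.main()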