1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47
|
use strict;
use warnings;
use Compiler::Lexer;
use Test::More;
use Data::Dumper;

# Build one expected Compiler::Lexer::Token. Every token in this test shares
# has_warnings/stype/line, so the helper supplies those defaults and callers
# pass only the fields that vary (kind, name, data, type).
sub expected_token {
    my (%fields) = @_;
    return bless {
        'has_warnings' => 0,
        'stype'        => 0,
        'line'         => 1,
        %fields,
    }, 'Compiler::Lexer::Token';
}

# Lexing `not /\d/`: the low-precedence `not` must be recognized as the
# AlphabetNot operator, and the following slash must start a regex literal
# (RegDelim, RegExp, RegDelim) rather than a division operator.
my $tokens = Compiler::Lexer->new->tokenize('not /\d/');
is_deeply($tokens, [
    expected_token(
        'kind' => Compiler::Lexer::Kind::T_SingleTerm,
        'name' => 'AlphabetNot',
        'data' => 'not',
        'type' => Compiler::Lexer::TokenType::T_AlphabetNot,
    ),
    expected_token(
        'kind' => Compiler::Lexer::Kind::T_Term,
        'name' => 'RegDelim',
        'data' => '/',
        'type' => Compiler::Lexer::TokenType::T_RegDelim,
    ),
    expected_token(
        'kind' => Compiler::Lexer::Kind::T_Term,
        'name' => 'RegExp',
        'data' => '\\d',
        'type' => Compiler::Lexer::TokenType::T_RegExp,
    ),
    expected_token(
        'kind' => Compiler::Lexer::Kind::T_Term,
        'name' => 'RegDelim',
        'data' => '/',
        'type' => Compiler::Lexer::TokenType::T_RegDelim,
    ),
], 'tokenize "not /\\d/": AlphabetNot followed by a regex literal');
done_testing;
|