#!perl
use strict;
use warnings;
use Data::Dumper;
use File::Spec;
use Devel::Peek;
# Make the test-support library (t/lib) visible regardless of cwd.
use lib File::Spec->catdir(qw(t lib));
BEGIN {
# When run from inside t/ (no 't' dir below us), the support lib is ./lib.
lib->import('lib')
if !-d 't';
}
use Sereal::TestSet qw(:all);
# Plan: 8 offset/bytes_consumed checks + 2 incremental scopes of 6 checks each
# + 3 encoder variants (raw/snappy_incr/zlib) of 31 checks each.
use Test::More tests => 8 + ( 2 * 6 ) + ( 31 * 3 );
use Sereal::Decoder qw(decode_sereal);
use Sereal::Decoder::Constants qw(:all);
# Simple test to see whether we can get the number of bytes consumed
# and whether offset works
# Verify that bytes_consumed reports the true packet length and that
# decode_with_offset can skip leading garbage, regardless of trailing junk.
SCOPE: {
    my $decoder = Sereal::Decoder->new();
    # A minimal v1 document: magic + version byte + zero header length + UNDEF tag.
    my $packet  = SRL_MAGIC_STRING . chr(1) . chr(0) . chr(SRL_HDR_UNDEF);
    my $plen    = length $packet;

    ok( !defined( $decoder->decode( $packet . "GARBAGE" ) ), "can decode with appended garbage" );
    is( $decoder->bytes_consumed, $plen, "consumed right number of bytes" );

    ok( !defined( $decoder->decode_with_offset( $packet, 0 ) ), "can decode with zero offset" );
    is( $decoder->bytes_consumed, $plen, "consumed right number of bytes" );

    ok( !defined( $decoder->decode_with_offset( "GARBAGE" . $packet, length "GARBAGE" ) ),
        "can decode with offset" );
    is( $decoder->bytes_consumed, $plen, "consumed right number of bytes" );

    ok( !defined( $decoder->decode_with_offset( "GARBAGE" . $packet . "TRAILING", length "GARBAGE" ) ),
        "can decode with offset and trailing garbage" );
    is( $decoder->bytes_consumed, $plen, "consumed right number of bytes" );
}
# An incremental decoder should eat one document per decode() call,
# removing consumed bytes from the (aliased) input scalar as it goes.
SCOPE: {
    my $decoder = Sereal::Decoder->new( { incremental => 1 } );
    # Five concatenated documents, each a small positive integer 1..5.
    my $stream = join '',
        map { SRL_MAGIC_STRING . chr(1) . chr(0) . chr( SRL_HDR_POS | $_ ) } 1 .. 5;
    for my $i ( 1 .. 5 ) {
        my $got = $decoder->decode($stream);
        is( "$got", "$i", "Incremental section no. $i yields right output" );
    }
    # Every packet was consumed, so nothing should be left in the buffer.
    is( $stream, '', "Data is gone after incremental parsing" );
}
# Same incremental-consumption check, but with the input scalar flagged
# as upgraded (UTF-8 internal representation) to exercise that code path.
SCOPE: {
    my $decoder = Sereal::Decoder->new( { incremental => 1 } );
    # Five concatenated documents, each a small positive integer 1..5.
    my $stream = join '',
        map { SRL_MAGIC_STRING . chr(1) . chr(0) . chr( SRL_HDR_POS | $_ ) } 1 .. 5;
    utf8::upgrade($stream);
    for my $i ( 1 .. 5 ) {
        my $got = $decoder->decode($stream);
        is( "$got", "$i", "Incremental section no. $i yields right output utf8 mode" );
    }
    # Every packet was consumed, so nothing should be left in the buffer.
    is( $stream, '', "Data is gone after incremental parsing utf8 mode" );
}
# Decode 30 concatenated documents from one buffer via decode_with_offset,
# for each supported encoding variant. Needs the encoder, so skip without it.
SKIP: {
    my $have_enc = have_encoder_and_decoder();
    if ( !$have_enc ) {
        skip "Need encoder for chunk tests", 31 * 3;
    }
    else {
        require Sereal::Encoder;
        Sereal::Encoder->import( "encode_sereal", "SRL_ZLIB" );
        my @cases = (
            [ raw         => [] ],
            [ snappy_incr => [ { snappy_incr => 1 } ] ],
            [ zlib        => [ { compress => SRL_ZLIB() } ] ],
        );
        for my $case (@cases) {
            my ( $name, $enc_opts ) = @$case;
            my $n = 30;
            # One buffer holding $n back-to-back documents encoding 1..$n.
            my $buffer  = join '', map { encode_sereal( $_, @$enc_opts ) } 1 .. $n;
            my $decoder = Sereal::Decoder->new;
            my @decoded;
            my $offset = 0;
            my $ok = eval {
                while (1) {
                    push @decoded, $decoder->decode_with_offset( $buffer, $offset );
                    $offset += $decoder->bytes_consumed;
                    # Stop at end of buffer, or if the decoder made no progress.
                    last if $offset >= length($buffer) or !$decoder->bytes_consumed;
                }
                1;
            };
            my $err = $@ || 'Zombie error';
            ok( $ok, "incremental decoder ($name) had no hissy fit" )
                or note( "Error: $err. Data structures decoded up to that point:\n"
                    . Data::Dumper::Dumper( \@decoded ) );
            is(
                $decoded[ $_ - 1 ], $_,
                "Decoding multiple packets from single string works ($name: $_)"
            ) for 1 .. $n;
        }
    }
}