File: 13slowleak.t

package info
libwww-curl-perl 4.17-12
  • area: main
  • in suites: forky, sid, trixie
  • size: 584 kB
  • sloc: perl: 1,909; makefile: 5
file content (42 lines) | stat: -rw-r--r-- 1,094 bytes
#!perl

use strict;
use warnings;
#use Test::More tests => 214;
use Test::More skip_all => "Not performing slow leakage regression test";

BEGIN { use_ok( 'WWW::Curl::Easy' ); }

my $url = $ENV{CURL_TEST_URL} || "http://www.google.com";

# There was a slow leak per curl handle init/cleanup. Hopefully fixed.

foreach my $j (1..200) {

# Init the curl session
my $curl = WWW::Curl::Easy->new() or die "cannot curl";

$curl->setopt(CURLOPT_NOPROGRESS, 1);
$curl->setopt(CURLOPT_FOLLOWLOCATION, 1);
$curl->setopt(CURLOPT_TIMEOUT, 30);

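# Send the response header and body to anonymous temp filehandles.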
open (HEAD, "+>",undef);
WWW::Curl::Easy::setopt($curl, CURLOPT_WRITEHEADER, \*HEAD);
open (BODY, "+>", undef);
WWW::Curl::Easy::setopt($curl, CURLOPT_FILE, \*BODY);

$curl->setopt(CURLOPT_URL, $url);
                                                                        
my $httpcode = 0;

my $retcode=$curl->perform();
if ($retcode == 0) {
	my $bytes=$curl->getinfo(CURLINFO_SIZE_DOWNLOAD);
	my $realurl=$curl->getinfo(CURLINFO_EFFECTIVE_URL);
	$httpcode=$curl->getinfo(CURLINFO_HTTP_CODE);
} else {
	print "not ok $retcode / ".$curl->errbuf."\n";
} 

}
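
A minimal sketch of what an automated check for this kind of leak could look like, if the skip above were ever replaced with a real regression test: sample the process's resident set size before and after the handle churn and flag unexpected growth. This assumes a Linux /proc filesystem, and the 1024 kB allowance is an arbitrary illustration, not anything defined by the module or this test.

#!perl
# Leak-check sketch (assumption: Linux /proc; the growth threshold is illustrative only).
use strict;
use warnings;
use WWW::Curl::Easy;

# Read the resident set size in kB from /proc/self/status.
sub rss_kb {
	open my $fh, '<', '/proc/self/status' or return undef;
	while (my $line = <$fh>) {
		return $1 if $line =~ /^VmRSS:\s+(\d+)\s+kB/;
	}
	return undef;
}

my $before = rss_kb();
foreach my $j (1..200) {
	# Create and discard a handle each time, as the test above does.
	my $curl = WWW::Curl::Easy->new() or die "cannot curl";
	$curl->setopt(CURLOPT_NOPROGRESS, 1);
	# $curl goes out of scope here and is cleaned up.
}
my $after = rss_kb();

if (defined $before && defined $after) {
	printf "RSS before: %d kB, after: %d kB\n", $before, $after;
	warn "possible slow leak per handle init/cleanup\n" if $after - $before > 1024;
}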