File: insert_times_inc_files.pl

package info (click to toggle)
dpm-postgres 1.7.4.7-1
  • links: PTS, VCS
  • area: main
  • in suites: squeeze
  • size: 13,788 kB
  • ctags: 10,782
  • sloc: ansic: 146,136; sh: 13,362; perl: 11,142; python: 5,529; cpp: 5,113; sql: 1,790; makefile: 955; fortran: 113
file content (68 lines) | stat: -rwxr-xr-x 2,180 bytes parent folder | download | duplicates (8)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
#!/usr/bin/perl

#
# Performance test : gets average insert times for 
# varying number of entries. 
#
# For each file count in @files, runs ./create_files against a fresh
# test directory, parses the timing lines it prints, and appends one
# summary row per run to results/insert/insert-times-inc-files.dat.
#
use strict;
use warnings;
use Getopt::Long;

use FindBin;

# run the "create_files" command with the different number of files
# and get the times back in a file

# File counts to benchmark — one create_files run per count.
my @files = (1, 10, 100, 1000, 10000, 100000);

my $file = "results/insert/insert-times-inc-files.dat";
# Three-arg open with a lexical handle; die with the OS error on failure.
open my $out_fh, '>', $file or die "Can't open $file: $!\n";

print {$out_fh} "num_files, total insert time (ms), ave insert time(ms), mean insert time (ms), thread_ave (ms), thread_file_ave (ms) \n";

# Timestamp used to make each run's test directory unique.
# localtime() returns a 0-based month, so add 1 for the calendar month
# (the original omitted this, labelling directories one month early).
my ($sec, $min, $hour, $day, $mon, $year, @rest) = localtime(time);
$year += 1900;
$mon  += 1;
my $timestamp = "$year-$mon-$day-$hour:$min:$sec";

my $num_threads = 1;    # per-thread averages assume a single thread — TODO confirm

foreach my $num_files (@files) {

	my $filename = "results/insert/insert-$num_files-files.dat";
	# Create the entries and capture create_files' timing output.
	`./create_files -d /grid/dteam/caitriana/test2/insert/$timestamp-$num_files -f $num_files > $filename`;
	open my $in_fh, '<', $filename or die "Can't open $filename: $!\n";

	# Reset accumulators per run: $result must not carry over from a
	# previous iteration if this run's output lacks a TOTAL line.
	my $result      = 0;    # total insert time for the whole run
	my $run_time    = 0;    # sum of individual per-insert times
	my $thread_time = 0;    # sum of per-thread times
	my $j           = 0;    # number of per-insert samples seen

	while (my $line = <$in_fh>) {
		chomp $line;
		my @data = split /\s+/, $line;
		# Skip blank or malformed lines so the eq tests below
		# never compare against undef.
		next unless defined $data[1];

		# Values are divided by 1000 — presumably us -> ms; TODO confirm.
		if ($data[1] eq "TOTAL") {
			$result = $data[2]/1000;
		}
		elsif ($data[1] eq "THREAD") {
			$thread_time += $data[2]/1000;
		}
		else {
			$run_time += $data[1]/1000;
			$j++;
		}
	}
	close $in_fh;

	# Guard the average: a run with no per-insert lines would otherwise
	# divide by zero and kill the whole benchmark sweep.
	my $ave_run            = $j ? $run_time / $j : 0;
	my $average            = $result / $num_files;
	my $ave_thread         = $thread_time / $num_threads;
	my $ave_file_perthread = $ave_thread / $num_files;

	$ave_run            = sprintf("%.2f", $ave_run);
	$average            = sprintf("%.2f", $average);
	$ave_thread         = sprintf("%.2f", $ave_thread);
	$ave_file_perthread = sprintf("%.2f", $ave_file_perthread);

	print {$out_fh} "$num_files \t $result \t $average \t $ave_run \t $ave_thread \t $ave_file_perthread\n";

	# delete the files that have just been produced, before testing with a different number of files
	`nsrm -rf /grid/dteam/caitriana/test2/insert/`;
}

# Check close on the write handle: buffered write errors surface here.
close $out_fh or die "Can't close $file: $!\n";