File: insert_rate.pl

package info (click to toggle)
dpm-postgres 1.7.4.7-1
  • links: PTS, VCS
  • area: main
  • in suites: squeeze
  • size: 13,788 kB
  • ctags: 10,782
  • sloc: ansic: 146,136; sh: 13,362; perl: 11,142; python: 5,529; cpp: 5,113; sql: 1,790; makefile: 955; fortran: 113
file content (85 lines) | stat: -rwxr-xr-x 2,515 bytes parent folder | download | duplicates (8)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
#!/usr/bin/perl

#
# Performance test : creates files for different number of files and threads.
# A fixed number of files (currently set to 1000 inside get_results) is
# inserted and the insert rate calculated. This is repeated 5 times for
# each reading.
#

use strict;
use warnings;
use Getopt::Long;

use FindBin;

# First run with relative pathnames and no transactions.
# NOTE(review): an earlier comment here said "absolute pathnames", but the
# call passes "yes" for the relative flag and the output file is named
# "-rel-notrans" — the comment was wrong, not the code.
my $outfile = "results/insert/insert-rate-rel-notrans.dat";
get_results($outfile, "yes", 0, "no", 0);

# then with relative pathnames AND transactions (run currently disabled)
$outfile = "results/insert/insert-rate-rel-trans.dat";
#get_results($outfile, "yes", 0, "yes", 100);

# the subroutine for running the command with the given parameters.

sub get_results {

  # Run ./create_files_rate repeatedly with the given options, parse its
  # per-run output file and append one result line per repetition to $file.
  #
  # Arguments:
  #   $file         - path of the results .dat file to (over)write
  #   $relative     - "yes" to pass -r (relative pathnames) to create_files_rate
  #   $depth        - directory depth; passed as "-n $depth" when > 0
  #   $transactions - "yes" to pass -x (use transactions)
  #   $commit_size  - passed as "-c $commit_size" when > 0
  #
  # Dies if the results file or a per-run output file cannot be opened.
  my ($file, $relative, $depth, $transactions, $commit_size) = @_;

  # Translate the parameters into a create_files_rate option string.
  my $optargs = "";
  $optargs .= "-r "              if $relative eq "yes";
  $optargs .= "-n $depth "       if $depth > 0;
  $optargs .= "-x "              if $transactions eq "yes";
  $optargs .= "-c $commit_size " if $commit_size > 0;

  # run the "create_files_rate" command with the different number of files
  # and threads and get the times back in a file
  my @threads = (1);

  open(my $out_fh, '>', $file) or die "Can't open $file: $!\n";

  # Header names the five columns actually printed below (the old header
  # listed only three and did not match the data lines).
  print $out_fh "num_threads \t start_time \t end_time \t num_files \t rate (inserts/sec)\n";

  # Build a timestamp used to keep each run's test directory unique.
  my ($sec, $min, $hour, $day, $mon, $year) = localtime(time);
  $year += 1900;
  $mon  += 1;    # localtime months are 0-based; without this January prints as 0
  my $timestamp = "$year-$mon-$day-$hour:$min:$sec";

  my $total_files = 1000;
  foreach my $num_threads (@threads) {
    # Repeat each measurement 5 times.
    for my $i (0 .. 4) {
      my $filename = "results/insert/$num_threads-thread-insert-rate-$i.dat";
      `./create_files_rate -d /grid/caitriana/test/$timestamp-$num_threads -f $total_files -t $num_threads $optargs > $filename`;

      # Only the last line of the output file is kept: it holds the final
      # start/end timestamps and file count (fields 1..3).
      open(my $in_fh, '<', $filename) or die "Can't open $filename: $!\n";
      my ($start_time, $end_time, $num_files) = (0, 0, 0);
      while (my $line = <$in_fh>) {
        chomp $line;
        my @fields = split /\s+/, $line;
        ($start_time, $end_time, $num_files) = @fields[1 .. 3];
      }
      close $in_fh;

      # The 1e6 factor suggests the timestamps are in microseconds
      # (TODO confirm against create_files_rate). Guard against a zero
      # elapsed interval so we never divide by zero.
      my $elapsed = $end_time - $start_time;
      my $rate = $elapsed ? 1000000 * $num_files / $elapsed : 0;
      print $out_fh "$num_threads \t $start_time \t $end_time \t $num_files \t $rate\n";

      # delete the files produced by this repetition
      `lfc-rm -rf /grid/caitriana/test/$timestamp-$num_threads`;
    }
    # delete the files that have just been produced, before testing with a
    # different number of threads
    `lfc-rm -rf /grid/caitriana/test/`;
  }
  # Check close on the write handle: buffered write errors surface here.
  close $out_fh or die "Can't close $file: $!\n";
}