# Rakefile defining per-scenario benchmark tasks under the benchmark: namespace.
require 'benchmark'
require 'tmpdir'
require 'csv'
namespace :benchmark do
# Define the rake tasks (setup/generate/run/profile) for one benchmark
# scenario located under +location+. The scenario directory must contain a
# 'description' file and a 'benchmarker.rb' defining a Benchmarker class;
# it may also provide a 'benchmarker_task.rb' defining BenchmarkerTask to
# declare extra task arguments for the run/profile tasks.
def generate_scenario_tasks(location, name)
  desc File.read(File.join(location, 'description'))
  # "benchmark:<name>" is an alias for "benchmark:<name>:run".
  task name => "#{name}:run"
  # Load a BenchmarkerTask to handle config of the benchmark
  task_handler_file = File.expand_path(File.join(location, 'benchmarker_task.rb'))
  if File.exist?(task_handler_file)
    require task_handler_file
    run_args = BenchmarkerTask.run_args
  else
    run_args = []
  end

  namespace name do
    # Fill in ENV defaults, create the target directory, and instantiate
    # the scenario's Benchmarker (shared with the tasks below via @benchmark).
    task :setup do
      ENV['ITERATIONS'] ||= '10'
      ENV['SIZE'] ||= '100'
      ENV['TARGET'] ||= Dir.mktmpdir(name)
      ENV['TARGET'] = File.expand_path(ENV['TARGET'])
      mkdir_p(ENV['TARGET'])
      require File.expand_path(File.join(location, 'benchmarker.rb'))
      @benchmark = Benchmarker.new(ENV['TARGET'], ENV['SIZE'].to_i)
    end

    task :generate => :setup do
      @benchmark.generate
      @benchmark.setup
    end

    desc "Run the #{name} scenario."
    task :run, [*run_args] => :generate do |_, args|
      # Benchmark::FMTSTR was renamed to Benchmark::FORMAT in Ruby 1.9.
      format = if RUBY_VERSION =~ /^1\.8/
                 Benchmark::FMTSTR
               else
                 Benchmark::FORMAT
               end
      report = []
      details = []
      Benchmark.benchmark(Benchmark::CAPTION, 10, format, "> total:", "> avg:") do |b|
        times = []
        ENV['ITERATIONS'].to_i.times do |i|
          start_time = Time.now.to_i
          times << b.report("Run #{i + 1}") do
            details << @benchmark.run(args)
          end
          # Row columns match the CSV header written below; 200/true are
          # fixed responsecode/success placeholder values.
          report << [to_millis(start_time), to_millis(times.last.real), 200, true, name]
        end
        sum = times.inject(Benchmark::Tms.new, &:+)
        # Extra rows printed by Benchmark.benchmark: the total and the per-run average.
        [sum, sum / times.length]
      end
      write_csv("#{name}.samples",
                %w{timestamp elapsed responsecode success name},
                report)
      # report details, if any were produced
      if details[0].is_a?(Array) && details[0][0].is_a?(Benchmark::Tms)
        # assume all entries are Tms if the first is
        # turn each into a hash of label => tms (since labels are lost when doing arithmetic on Tms)
        hashed = details.reduce([]) do |memo, measures|
          memo << measures.reduce({}) {|memo2, measure| memo2[measure.label] = measure; memo2}
          memo
        end
        # sum across all hashes
        hashed_totals = hashed.reduce {|memo, h| memo.merge(h) {|k, old, new| old + new }}
        # average the totals
        hashed_totals.keys.each {|k| hashed_totals[k] /= details.length }
        min_width = 14
        max_width = (hashed_totals.keys.map(&:length) << min_width).max
        puts "\n"
        puts sprintf("%2$*1$s %3$s", -max_width, 'Details (avg)', " user system total real")
        puts "-" * (46 + max_width)
        hashed_totals.sort.each {|k,v| puts sprintf("%2$*1$s %3$s", -max_width, k, v.format) }
      end
    end

    desc "Profile a single run of the #{name} scenario."
    task :profile, [:warm_up_runs, *run_args] => :generate do |_, args|
      warm_up_runs = (args[:warm_up_runs] || '0').to_i
      # Optional warm-up passes are executed outside the profiler.
      warm_up_runs.times do
        @benchmark.run(args)
      end
      require 'ruby-prof'
      result = RubyProf.profile do
        @benchmark.run(args)
      end
      # Emit a callgrind-format trace readable by kcachegrind and friends.
      printer = RubyProf::CallTreePrinter.new(result)
      File.open(File.join("callgrind.#{name}.#{Time.now.to_i}.trace"), "w") do |f|
        printer.print(f)
      end
    end

    # Convert a time in (possibly fractional) seconds to whole milliseconds.
    def to_millis(seconds)
      (seconds * 1000).round
    end

    # Write +header+ followed by every row of +data+ to the CSV file +file+.
    def write_csv(file, header, data)
      CSV.open(file, 'w') do |csv|
        csv << header
        data.each do |line|
          csv << line
        end
      end
    end
  end
end
# Discover every scenario directory under benchmarks/ and generate its tasks.
scenarios = []
Dir.glob('benchmarks/*') do |location|
  name = File.basename(location)
  scenarios << name
  # Reuse +name+ rather than recomputing File.basename(location).
  generate_scenario_tasks(location, name)
end
# Aggregate tasks that run or profile every discovered scenario in sequence.
namespace :all do
  { :profile => 'Profile', :run => 'Run' }.each do |action, verb|
    desc "#{verb} all of the scenarios. (#{scenarios.join(', ')})"
    task action do
      scenarios.each do |scenario|
        sh "rake benchmark:#{scenario}:#{action}"
      end
    end
  end
end
end