bin/bench
#!/usr/bin/env ruby
# ActiveModelSerializers Benchmark driver
# Adapted from
# https://github.com/ruby-bench/ruby-bench-suite/blob/8ad567f7e43a044ae48c36833218423bb1e2bd9d/rails/benchmarks/driver.rb
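#
# Illustrative usage (the pattern and env values here are examples, not taken
# from the original script; benchmark file names are hypothetical):
#   bin/bench
#   bin/bench --repeat-count 3 --pattern bm_caching --env CACHE_ON=off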
require 'bundler'
Bundler.setup
require 'json'
require 'pathname'
require 'optparse'
require 'digest'
require 'shellwords'
require 'logger'
require 'English'

class BenchmarkDriver
  ROOT = Pathname File.expand_path(File.join('..', '..'), __FILE__)
  BASE = ENV.fetch('BASE') { ROOT.join('test', 'benchmark') }
  ESCAPED_BASE = Shellwords.shellescape(BASE)

  def self.benchmark(options)
    new(options).run
  end

  def self.parse_argv_and_run(argv = ARGV, options = {})
    options = {
      repeat_count: 1,
      pattern: [],
      env: 'CACHE_ON=on'
    }.merge!(options)

    OptionParser.new do |opts|
      opts.banner = 'Usage: bin/bench [options]'

      opts.on('-r', '--repeat-count [NUM]', 'Run benchmarks [NUM] times, taking the best result') do |value|
        options[:repeat_count] = value.to_i
      end

      opts.on('-p', '--pattern <PATTERN1,PATTERN2,PATTERN3>', 'Benchmark name pattern') do |value|
        options[:pattern] = value.split(',')
      end

      opts.on('-e', '--env <var1=val1,var2=val2,var3=val3>', 'ENV variables to pass to the benchmarked process') do |value|
        options[:env] = value.split(',')
      end
    end.parse!(argv)

    benchmark(options)
  end

  attr_reader :commit_hash, :base

  # Based on logfmt:
  # https://www.brandur.org/logfmt
  # For a more complete implementation, see:
  # https://github.com/arachnid-cb/logfmtr/blob/master/lib/logfmtr/base.rb
  # For usage, see:
  # https://blog.codeship.com/logfmt-a-log-format-thats-easy-to-read-and-write/
  # https://engineering.heroku.com/blogs/2014-09-05-hutils-explore-your-structured-data-logs/
  # For a Ruby parser, see:
  # https://github.com/cyberdelia/logfmt-ruby
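  #
  # Illustrative example of a line produced by the formatter below (values
  # invented, not captured from a real run):
  #   level=INFO time=2016-01-01T00:00:00.000000Z pid=12345 progname= msg='Benchmark results:'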
  def self.summary_logger(device = 'output.txt')
    require 'time'
    logger = Logger.new(device)
    logger.level = Logger::INFO
    logger.formatter = proc { |severity, datetime, progname, msg|
      msg = "'#{msg}'"
      "level=#{severity} time=#{datetime.utc.iso8601(6)} pid=#{Process.pid} progname=#{progname} msg=#{msg}#{$INPUT_RECORD_SEPARATOR}"
    }
    logger
  end

  def self.stdout_logger
    logger = Logger.new(STDOUT)
    logger.level = Logger::INFO
    logger.formatter = proc { |_, _, _, msg| "#{msg}#{$INPUT_RECORD_SEPARATOR}" }
    logger
  end

  def initialize(options)
    @writer = ENV['SUMMARIZE'] ? self.class.summary_logger : self.class.stdout_logger
    @repeat_count = options[:repeat_count]
    @pattern = options[:pattern]
    @commit_hash = options.fetch(:commit_hash) { `git rev-parse --short HEAD`.chomp }
    @base = options.fetch(:base) { ESCAPED_BASE }
    @env = Array(options[:env]).join(' ')
    @rubyopt = options[:rubyopt] # TODO: rename
  end

  def run
    files.each do |path|
      next if !@pattern.empty? && /#{@pattern.join('|')}/ !~ File.basename(path)
      run_single(Shellwords.shellescape(path))
    end
  end

  private

  def files
    Dir[File.join(base, 'bm_*')]
  end

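  # Each benchmark script under BASE is expected to print one JSON document per
  # line; the keys consumed below are 'label', 'version', 'rails_version',
  # 'iterations_per_second', and 'total_allocated_objects_per_iteration'.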
  def run_single(path)
    script = "RAILS_ENV=production #{@env} ruby #{@rubyopt} #{path}"
    environment = `ruby -v`.chomp.strip[/\d+\.\d+\.\d+\w+/]

    runs_output = measure(script)
    if runs_output.empty?
      results = { error: :no_results }
      return
    end

    results = {}
    results['commit_hash'] = commit_hash
    results['version'] = runs_output.first['version']
    results['rails_version'] = runs_output.first['rails_version']
    results['benchmark_run[environment]'] = environment
    results['runs'] = []

    runs_output.each do |output|
      results['runs'] << {
        'benchmark_type[category]' => output['label'],
        'benchmark_run[result][iterations_per_second]' => output['iterations_per_second'].round(3),
        'benchmark_run[result][total_allocated_objects_per_iteration]' => output['total_allocated_objects_per_iteration']
      }
    end
  ensure
    results && report(results)
  end

  def report(results)
    @writer.info { 'Benchmark results:' }
    @writer.info { JSON.pretty_generate(results) }
  end

  def summarize(result)
    puts "#{result['label']} #{result['iterations_per_second']}/ips; #{result['total_allocated_objects_per_iteration']} objects"
  end

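  # Runs the benchmark script @repeat_count times, parsing each line of its
  # output as JSON and keeping the best run (highest iterations_per_second)
  # for each label.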
  # FIXME: backticks capture the full output, but they also return output from
  # failed runs (the exit status is not checked).
  def measure(script)
    results = Hash.new { |h, k| h[k] = [] }
    @repeat_count.times do
      output = sh(script)
      output.each_line do |line|
        next if line.nil?
        begin
          result = JSON.parse(line)
        rescue JSON::ParserError
          result = { error: line } # rubocop:disable Lint/UselessAssignment
        else
          summarize(result)
          results[result['label']] << result
        end
      end
    end
    results.map do |_, bm_runs|
      bm_runs.sort_by do |run|
        run['iterations_per_second']
      end.last
    end
  end

  def sh(cmd)
    `#{cmd}`
  end
end
BenchmarkDriver.parse_argv_and_run if $PROGRAM_NAME == __FILE__