 29     int max_time, int batch_size) {
 37   function(batch_size, args);
 42   int64_t iters_guess = (target_cycles / sw.ElapsedTime()) * batch_size;
 50   batch_size = (iters_guess - iters) / 5;
 55   function(batch_size, args);
 61   return iters / ms_elapsed;
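The fragments above only hint at the measurement routine, so here is a minimal, portable sketch of the calibration idea they suggest. std::chrono nanoseconds stand in for the cycle-counting Stopwatch, cycles_per_ms() is approximated accordingly, and the function name MeasureRate, the BenchmarkFunction signature, and the assumption that max_time is a millisecond budget are all guesses rather than the project's actual code.

#include <algorithm>
#include <chrono>
#include <cstdint>

// Portable stand-in for the cycle-counting stopwatch in the excerpt; the real
// ElapsedTime() returns CPU ticks, nanoseconds are used here instead.
class Stopwatch {
 public:
  void Start() { start_ = std::chrono::steady_clock::now(); }
  uint64_t ElapsedTime() const {
    return static_cast<uint64_t>(
        std::chrono::duration_cast<std::chrono::nanoseconds>(
            std::chrono::steady_clock::now() - start_).count());
  }
 private:
  std::chrono::steady_clock::time_point start_;
};

// Assumed benchmark signature: a batch of iterations plus an opaque argument.
typedef void (*BenchmarkFunction)(int iters, void *args);

// With nanoseconds standing in for cycles, one millisecond is 1e6 "ticks".
static int64_t cycles_per_ms() { return 1000000; }

// Sketch of the calibration loop implied by the fragments: run the function in
// batches, extrapolate how many iterations fit into the time budget, and
// report the rate in iterations per millisecond.
double MeasureRate(BenchmarkFunction function, void *args,
                   int max_time, int batch_size) {
  // Assumption: max_time is a budget in milliseconds.
  const int64_t target_cycles =
      static_cast<int64_t>(max_time) * cycles_per_ms();
  Stopwatch sw;
  sw.Start();
  function(batch_size, args);  // first batch doubles as a warm-up
  int64_t iters = batch_size;
  int64_t elapsed =
      std::max<int64_t>(1, static_cast<int64_t>(sw.ElapsedTime()));
  while (elapsed < target_cycles) {
    // Extrapolate the total iteration count that would fill the budget, then
    // advance by a fifth of the remainder, as in (iters_guess - iters) / 5.
    const int64_t iters_guess = (target_cycles / elapsed) * iters;
    const int64_t step = (iters_guess - iters) / 5;
    if (step > 0) batch_size = static_cast<int>(step);
    function(batch_size, args);
    iters += batch_size;
    elapsed = std::max<int64_t>(1, static_cast<int64_t>(sw.ElapsedTime()));
  }
  const double ms_elapsed = static_cast<double>(elapsed) / cycles_per_ms();
  return iters / ms_elapsed;  // iterations per millisecond
}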
 66   LOG(ERROR) << "WARNING: Running benchmark in DEBUG mode.";
 72   if (baseline_idx == -1) baseline_idx = benchmarks_.size();
 77   benchmark.args = args;
 78   benchmark.baseline_idx = baseline_idx;
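Combining the fragments at source lines 72-78 with the AddBenchmark signature listed further below gives a plausible reconstruction of how a benchmark is registered. The extra BenchmarkResult fields, the zero-initialized rate, and the returned index are assumptions, not the project's confirmed layout.

#include <string>
#include <vector>

// Assumed benchmark signature, matching the calls seen in the excerpt.
typedef void (*BenchmarkFunction)(int iters, void *args);

// Assumed shape of the per-benchmark record stored in benchmarks_; only the
// fields visible in the excerpt and the member list are certain.
struct BenchmarkResult {
  std::string name;
  BenchmarkFunction fn;
  void *args;
  int baseline_idx;
  double rate;  // filled in later when the benchmarks are run
};

class Benchmark {
 public:
  // Registers a function to run later. Passing baseline_idx == -1 makes the
  // new entry its own baseline (it gets its own index), mirroring the
  // excerpt's `if (baseline_idx == -1) baseline_idx = benchmarks_.size();`.
  int AddBenchmark(const std::string &name, BenchmarkFunction fn, void *args,
                   int baseline_idx = 0) {
    if (baseline_idx == -1) baseline_idx = static_cast<int>(benchmarks_.size());
    BenchmarkResult benchmark;
    benchmark.name = name;
    benchmark.fn = fn;
    benchmark.args = args;
    benchmark.baseline_idx = baseline_idx;
    benchmark.rate = 0.0;
    benchmarks_.push_back(benchmark);
    return static_cast<int>(benchmarks_.size()) - 1;  // index of the new entry
  }

 private:
  std::vector<BenchmarkResult> benchmarks_;
};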
 94   int function_out_width = 30;
 95   int rate_out_width = 20;
 96   int comparison_out_width = 20;
 98   int total_width = function_out_width + rate_out_width + comparison_out_width + padding;
101      << setw(function_out_width - name_.size() - 1) << "Function"
102      << setw(rate_out_width) << "Rate (iters/ms)"
103      << setw(comparison_out_width) << "Comparison" << endl;
104   for (int i = 0; i < total_width; ++i) {
109   int previous_baseline_idx = -1;
112   if (previous_baseline_idx != benchmarks_[i].baseline_idx && i > 0) ss << endl;
113   ss << setw(function_out_width) << benchmarks_[i].name
114      << setw(rate_out_width) << setprecision(4) << benchmarks_[i].rate
115      << setw(comparison_out_width - 1) << setprecision(4)
116      << (benchmarks_[i].rate / base_line) << "X" << endl;
117   previous_baseline_idx = benchmarks_[i].baseline_idx;
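For reference, this is a condensed, runnable illustration of the table-building pattern used above: three right-aligned columns built with setw, rates printed with setprecision(4), and a comparison column that divides each rate by its baseline and appends an "X". The row data is hypothetical, and the name_.size() adjustment in the real header line (presumably making room for the benchmark name printed before it) is omitted.

#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main() {
  struct Row { std::string name; double rate; int baseline_idx; };
  // Hypothetical rates, purely to show the formatting.
  std::vector<Row> rows = {{"Baseline", 120.0, 0}, {"Optimized", 300.0, 0}};

  const int function_out_width = 30;
  const int rate_out_width = 20;
  const int comparison_out_width = 20;
  const int total_width =
      function_out_width + rate_out_width + comparison_out_width;

  std::ostringstream ss;
  // Header row: each column right-aligned to its fixed width.
  ss << std::setw(function_out_width) << "Function"
     << std::setw(rate_out_width) << "Rate (iters/ms)"
     << std::setw(comparison_out_width) << "Comparison" << std::endl;
  for (int i = 0; i < total_width; ++i) ss << '-';
  ss << std::endl;

  // One row per benchmark: name, rate, and rate relative to its baseline.
  for (const Row &row : rows) {
    const double base_line = rows[row.baseline_idx].rate;
    ss << std::setw(function_out_width) << row.name
       << std::setw(rate_out_width) << std::setprecision(4) << row.rate
       << std::setw(comparison_out_width - 1) << std::setprecision(4)
       << (row.rate / base_line) << "X" << std::endl;
  }
  std::cout << ss.str();
  return 0;
}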
int AddBenchmark(const std::string &name, BenchmarkFunction fn, void *args, int baseline_idx = 0)
uint64_t ElapsedTime() const
    Returns the elapsed time in CPU ticks.
static std::string GetMachineInfo()
    Returns the machine/build configuration as a string.
std::string Measure()
    Runs all the benchmarks and returns the result as a formatted string.
std::vector<BenchmarkResult> benchmarks_
static std::string model_name()
    Returns the model name of the CPU (e.g. Intel i7-2600).
Benchmark(const std::string &name)
    Name of the microbenchmark; it is included in the result output.
static int64_t cycles_per_ms()
    Returns the number of CPU cycles per millisecond.
uint8_t padding[64 - sizeof(int)]
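A usage sketch, assuming the Benchmark class summarized above is available through the project's header (not shown here); the workload functions, their names, and the choice of baseline indices are hypothetical.

#include <iostream>
#include <string>

// Two hypothetical workloads; the harness calls each with the number of
// iterations to run and an opaque args pointer.
static void BM_AppendOperator(int iters, void * /*args*/) {
  for (int i = 0; i < iters; ++i) {
    std::string s;
    for (int j = 0; j < 64; ++j) s += 'x';
  }
}

static void BM_AppendPushBack(int iters, void * /*args*/) {
  for (int i = 0; i < iters; ++i) {
    std::string s;
    for (int j = 0; j < 64; ++j) s.push_back('x');
  }
}

int main() {
  Benchmark benchmark("string_append");  // name included in the result output
  // The first entry is its own baseline (baseline_idx == -1); the second uses
  // the default baseline_idx of 0, so its rate is reported relative to entry 0.
  benchmark.AddBenchmark("append_operator", &BM_AppendOperator, nullptr, -1);
  benchmark.AddBenchmark("append_push_back", &BM_AppendPushBack, nullptr);
  // Measure() runs every registered benchmark and returns the formatted table.
  std::cout << benchmark.Measure() << std::endl;
  return 0;
}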