mirror of https://github.com/google/benchmark.git
synced 2024-11-27 20:44:27 +00:00

prefix macros to avoid clashes (#1186)

This commit is contained in:
parent 5da5660429
commit 6a5bf081d3
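This commit renames the library's internal CHECK*, DEFINE_*, and DECLARE_* macros to BM_-prefixed forms. Unprefixed names like CHECK and DEFINE_bool are also claimed by other widely used libraries (glog and gflags spell their macros exactly this way), so a build that pulls benchmark's internal headers in next to one of them risks macro redefinition. A minimal sketch of that failure mode, with made-up macro bodies (illustration only, not code from this commit):

    // Two libraries both spell their assertion macro "CHECK". Whichever
    // definition comes last silently wins, so behavior depends on include
    // order (without the #undef, the compiler also warns about redefinition).
    #include <cstdio>
    #include <cstdlib>

    #define CHECK(cond) \
      do { if (!(cond)) { std::fprintf(stderr, "check failed\n"); std::abort(); } } while (0)

    // A second header later redefines the same name as a release-mode no-op:
    #undef CHECK
    #define CHECK(cond) ((void)(cond))

    int main() {
      CHECK(1 + 1 == 3);  // passes silently: the no-op definition won
      std::puts("no diagnostic, no abort");
      return 0;
    }

Prefixing the internal macros as BM_CHECK, BM_DEFINE_bool, and so on keeps them out of that shared namespace.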
@@ -58,71 +58,71 @@
namespace benchmark {

// Print a list of benchmarks. This option overrides all other options.
-DEFINE_bool(benchmark_list_tests, false);
+BM_DEFINE_bool(benchmark_list_tests, false);

// A regular expression that specifies the set of benchmarks to execute. If
// this flag is empty, or if this flag is the string "all", all benchmarks
// linked into the binary are run.
-DEFINE_string(benchmark_filter, ".");
+BM_DEFINE_string(benchmark_filter, ".");

// Minimum number of seconds we should run benchmark before results are
// considered significant. For cpu-time based tests, this is the lower bound
// on the total cpu time used by all threads that make up the test. For
// real-time based tests, this is the lower bound on the elapsed time of the
// benchmark execution, regardless of number of threads.
-DEFINE_double(benchmark_min_time, 0.5);
+BM_DEFINE_double(benchmark_min_time, 0.5);

// The number of runs of each benchmark. If greater than 1, the mean and
// standard deviation of the runs will be reported.
-DEFINE_int32(benchmark_repetitions, 1);
+BM_DEFINE_int32(benchmark_repetitions, 1);

// If set, enable random interleaving of repetitions of all benchmarks.
// See http://github.com/google/benchmark/issues/1051 for details.
-DEFINE_bool(benchmark_enable_random_interleaving, false);
+BM_DEFINE_bool(benchmark_enable_random_interleaving, false);

// Report the result of each benchmark repetitions. When 'true' is specified
// only the mean, standard deviation, and other statistics are reported for
// repeated benchmarks. Affects all reporters.
-DEFINE_bool(benchmark_report_aggregates_only, false);
+BM_DEFINE_bool(benchmark_report_aggregates_only, false);

// Display the result of each benchmark repetitions. When 'true' is specified
// only the mean, standard deviation, and other statistics are displayed for
// repeated benchmarks. Unlike benchmark_report_aggregates_only, only affects
// the display reporter, but *NOT* file reporter, which will still contain
// all the output.
-DEFINE_bool(benchmark_display_aggregates_only, false);
+BM_DEFINE_bool(benchmark_display_aggregates_only, false);

// The format to use for console output.
// Valid values are 'console', 'json', or 'csv'.
-DEFINE_string(benchmark_format, "console");
+BM_DEFINE_string(benchmark_format, "console");

// The format to use for file output.
// Valid values are 'console', 'json', or 'csv'.
-DEFINE_string(benchmark_out_format, "json");
+BM_DEFINE_string(benchmark_out_format, "json");

// The file to write additional output to.
-DEFINE_string(benchmark_out, "");
+BM_DEFINE_string(benchmark_out, "");

// Whether to use colors in the output. Valid values:
// 'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to use colors if
// the output is being sent to a terminal and the TERM environment variable is
// set to a terminal type that supports colors.
-DEFINE_string(benchmark_color, "auto");
+BM_DEFINE_string(benchmark_color, "auto");

// Whether to use tabular format when printing user counters to the console.
// Valid values: 'true'/'yes'/1, 'false'/'no'/0. Defaults to false.
-DEFINE_bool(benchmark_counters_tabular, false);
+BM_DEFINE_bool(benchmark_counters_tabular, false);

// List of additional perf counters to collect, in libpfm format. For more
// information about libpfm: https://man7.org/linux/man-pages/man3/libpfm.3.html
-DEFINE_string(benchmark_perf_counters, "");
+BM_DEFINE_string(benchmark_perf_counters, "");

// Extra context to include in the output formatted as comma-separated key-value
// pairs. Kept internal as it's only used for parsing from env/command line.
-DEFINE_kvpairs(benchmark_context, {});
+BM_DEFINE_kvpairs(benchmark_context, {});

// The level of verbose logging to output
-DEFINE_int32(v, 0);
+BM_DEFINE_int32(v, 0);

namespace internal {
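These FLAGS_* globals back the library's command-line options; only the defining macros change, not how users set the flags. For reference, a minimal benchmark that picks them up (standard public API usage, not code from this commit):

    #include <string>
    #include <benchmark/benchmark.h>

    static void BM_StringCreation(benchmark::State& state) {
      for (auto _ : state) {
        std::string created("hello");
        benchmark::DoNotOptimize(created);
      }
    }
    BENCHMARK(BM_StringCreation);

    // BENCHMARK_MAIN() parses the flags defined above, e.g.:
    //   ./mybench --benchmark_filter=BM_String --benchmark_repetitions=3
    BENCHMARK_MAIN();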
@@ -151,8 +151,9 @@ State::State(IterationCount max_iters, const std::vector<int64_t>& ranges,
      timer_(timer),
      manager_(manager),
      perf_counters_measurement_(perf_counters_measurement) {
-  CHECK(max_iterations != 0) << "At least one iteration must be run";
-  CHECK_LT(thread_index, threads) << "thread_index must be less than threads";
+  BM_CHECK(max_iterations != 0) << "At least one iteration must be run";
+  BM_CHECK_LT(thread_index, threads)
+      << "thread_index must be less than threads";

  // Note: The use of offsetof below is technically undefined until C++17
  // because State is not a standard layout type. However, all compilers
@@ -181,21 +182,21 @@ State::State(IterationCount max_iters, const std::vector<int64_t>& ranges,

void State::PauseTiming() {
  // Add in time accumulated so far
-  CHECK(started_ && !finished_ && !error_occurred_);
+  BM_CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StopTimer();
  if (perf_counters_measurement_) {
    auto measurements = perf_counters_measurement_->StopAndGetMeasurements();
    for (const auto& name_and_measurement : measurements) {
      auto name = name_and_measurement.first;
      auto measurement = name_and_measurement.second;
-      CHECK_EQ(counters[name], 0.0);
+      BM_CHECK_EQ(counters[name], 0.0);
      counters[name] = Counter(measurement, Counter::kAvgIterations);
    }
  }
}

void State::ResumeTiming() {
-  CHECK(started_ && !finished_ && !error_occurred_);
+  BM_CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StartTimer();
  if (perf_counters_measurement_) {
    perf_counters_measurement_->Start();
@@ -203,7 +204,7 @@ void State::ResumeTiming() {
  }
}

void State::SkipWithError(const char* msg) {
-  CHECK(msg);
+  BM_CHECK(msg);
  error_occurred_ = true;
  {
    MutexLock l(manager_->GetBenchmarkMutex());
@@ -226,7 +227,7 @@ void State::SetLabel(const char* label) {
}

void State::StartKeepRunning() {
-  CHECK(!started_ && !finished_);
+  BM_CHECK(!started_ && !finished_);
  started_ = true;
  total_iterations_ = error_occurred_ ? 0 : max_iterations;
  manager_->StartStopBarrier();
@@ -234,7 +235,7 @@ void State::StartKeepRunning() {
}

void State::FinishKeepRunning() {
-  CHECK(started_ && (!finished_ || error_occurred_));
+  BM_CHECK(started_ && (!finished_ || error_occurred_));
  if (!error_occurred_) {
    PauseTiming();
  }
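PauseTiming() and ResumeTiming() are public State methods; this hunk only swaps their internal assertions. A typical caller uses them to exclude per-iteration setup from the measurement (standard usage sketch, independent of this commit):

    #include <set>
    #include <benchmark/benchmark.h>

    static void BM_SetInsert(benchmark::State& state) {
      std::set<int> data;
      for (auto _ : state) {
        state.PauseTiming();   // container reset is not part of the measurement
        data.clear();
        state.ResumeTiming();
        for (int j = 0; j < state.range(0); ++j) data.insert(j);
      }
    }
    BENCHMARK(BM_SetInsert)->Arg(1 << 10);
    BENCHMARK_MAIN();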
@@ -282,7 +283,7 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
                   BenchmarkReporter* display_reporter,
                   BenchmarkReporter* file_reporter) {
  // Note the file_reporter can be null.
-  CHECK(display_reporter != nullptr);
+  BM_CHECK(display_reporter != nullptr);

  // Determine the width of the name field using a minimum width of 10.
  bool might_have_aggregates = FLAGS_benchmark_repetitions > 1;
@@ -111,7 +111,7 @@ void BenchmarkFamilies::ClearBenchmarks() {
bool BenchmarkFamilies::FindBenchmarks(
    std::string spec, std::vector<BenchmarkInstance>* benchmarks,
    std::ostream* ErrStream) {
-  CHECK(ErrStream);
+  BM_CHECK(ErrStream);
  auto& Err = *ErrStream;
  // Make regular expression out of command-line flag
  std::string error_msg;
@@ -225,7 +225,7 @@ Benchmark* Benchmark::Name(const std::string& name) {
}

Benchmark* Benchmark::Arg(int64_t x) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
  args_.push_back({x});
  return this;
}
@@ -236,7 +236,7 @@ Benchmark* Benchmark::Unit(TimeUnit unit) {
}

Benchmark* Benchmark::Range(int64_t start, int64_t limit) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
  std::vector<int64_t> arglist;
  AddRange(&arglist, start, limit, range_multiplier_);
@@ -248,7 +248,7 @@ Benchmark* Benchmark::Range(int64_t start, int64_t limit) {

Benchmark* Benchmark::Ranges(
    const std::vector<std::pair<int64_t, int64_t>>& ranges) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(ranges.size()));
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(ranges.size()));
  std::vector<std::vector<int64_t>> arglists(ranges.size());
  for (std::size_t i = 0; i < ranges.size(); i++) {
    AddRange(&arglists[i], ranges[i].first, ranges[i].second,
@@ -262,7 +262,7 @@ Benchmark* Benchmark::Ranges(

Benchmark* Benchmark::ArgsProduct(
    const std::vector<std::vector<int64_t>>& arglists) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(arglists.size()));
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(arglists.size()));

  std::vector<std::size_t> indices(arglists.size());
  const std::size_t total = std::accumulate(
@@ -289,20 +289,20 @@ Benchmark* Benchmark::ArgsProduct(
}

Benchmark* Benchmark::ArgName(const std::string& name) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
  arg_names_ = {name};
  return this;
}

Benchmark* Benchmark::ArgNames(const std::vector<std::string>& names) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(names.size()));
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(names.size()));
  arg_names_ = names;
  return this;
}

Benchmark* Benchmark::DenseRange(int64_t start, int64_t limit, int step) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
-  CHECK_LE(start, limit);
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  BM_CHECK_LE(start, limit);
  for (int64_t arg = start; arg <= limit; arg += step) {
    args_.push_back({arg});
  }
@@ -310,7 +310,7 @@ Benchmark* Benchmark::DenseRange(int64_t start, int64_t limit, int step) {
}

Benchmark* Benchmark::Args(const std::vector<int64_t>& args) {
-  CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(args.size()));
+  BM_CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(args.size()));
  args_.push_back(args);
  return this;
}
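Each registration helper above asserts argument-count consistency through the renamed checks; the public behavior is unchanged. For example (standard API usage):

    #include <algorithm>
    #include <vector>
    #include <benchmark/benchmark.h>

    static void BM_Copy(benchmark::State& state) {
      std::vector<char> src(static_cast<size_t>(state.range(0)), 'x');
      std::vector<char> dst(src.size());
      for (auto _ : state) {
        std::copy(src.begin(), src.end(), dst.begin());
        benchmark::DoNotOptimize(dst.data());
      }
    }
    // Range() expands by the range multiplier (default 8): 8, 64, 512, 4096.
    BENCHMARK(BM_Copy)->Range(8, 4096);
    // Explicit argument lists go through Arg()/Args():
    BENCHMARK(BM_Copy)->Arg(100)->Arg(1000);
    BENCHMARK_MAIN();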
@@ -321,27 +321,27 @@ Benchmark* Benchmark::Apply(void (*custom_arguments)(Benchmark* benchmark)) {
}

Benchmark* Benchmark::RangeMultiplier(int multiplier) {
-  CHECK(multiplier > 1);
+  BM_CHECK(multiplier > 1);
  range_multiplier_ = multiplier;
  return this;
}

Benchmark* Benchmark::MinTime(double t) {
-  CHECK(t > 0.0);
-  CHECK(iterations_ == 0);
+  BM_CHECK(t > 0.0);
+  BM_CHECK(iterations_ == 0);
  min_time_ = t;
  return this;
}

Benchmark* Benchmark::Iterations(IterationCount n) {
-  CHECK(n > 0);
-  CHECK(IsZero(min_time_));
+  BM_CHECK(n > 0);
+  BM_CHECK(IsZero(min_time_));
  iterations_ = n;
  return this;
}

Benchmark* Benchmark::Repetitions(int n) {
-  CHECK(n > 0);
+  BM_CHECK(n > 0);
  repetitions_ = n;
  return this;
}
@@ -374,14 +374,14 @@ Benchmark* Benchmark::MeasureProcessCPUTime() {
}

Benchmark* Benchmark::UseRealTime() {
-  CHECK(!use_manual_time_)
+  BM_CHECK(!use_manual_time_)
      << "Cannot set UseRealTime and UseManualTime simultaneously.";
  use_real_time_ = true;
  return this;
}

Benchmark* Benchmark::UseManualTime() {
-  CHECK(!use_real_time_)
+  BM_CHECK(!use_real_time_)
      << "Cannot set UseRealTime and UseManualTime simultaneously.";
  use_manual_time_ = true;
  return this;
@@ -405,14 +405,14 @@ Benchmark* Benchmark::ComputeStatistics(std::string name,
}

Benchmark* Benchmark::Threads(int t) {
-  CHECK_GT(t, 0);
+  BM_CHECK_GT(t, 0);
  thread_counts_.push_back(t);
  return this;
}

Benchmark* Benchmark::ThreadRange(int min_threads, int max_threads) {
-  CHECK_GT(min_threads, 0);
-  CHECK_GE(max_threads, min_threads);
+  BM_CHECK_GT(min_threads, 0);
+  BM_CHECK_GE(max_threads, min_threads);

  AddRange(&thread_counts_, min_threads, max_threads, 2);
  return this;
@@ -420,9 +420,9 @@ Benchmark* Benchmark::ThreadRange(int min_threads, int max_threads) {

Benchmark* Benchmark::DenseThreadRange(int min_threads, int max_threads,
                                       int stride) {
-  CHECK_GT(min_threads, 0);
-  CHECK_GE(max_threads, min_threads);
-  CHECK_GE(stride, 1);
+  BM_CHECK_GT(min_threads, 0);
+  BM_CHECK_GE(max_threads, min_threads);
+  BM_CHECK_GE(stride, 1);

  for (auto i = min_threads; i < max_threads; i += stride) {
    thread_counts_.push_back(i);
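The thread-count helpers validate their inputs with the renamed checks (min_threads > 0, max_threads >= min_threads, stride >= 1). Typical public usage (standard API sketch; the exact thread counts assume ThreadRange doubles between bounds and DenseThreadRange includes the upper bound):

    #include <atomic>
    #include <benchmark/benchmark.h>

    static std::atomic<int> counter{0};

    static void BM_Increment(benchmark::State& state) {
      for (auto _ : state) counter.fetch_add(1, std::memory_order_relaxed);
    }
    // ThreadRange doubles between the bounds: runs with 1, 2, 4, 8 threads.
    BENCHMARK(BM_Increment)->ThreadRange(1, 8);
    // DenseThreadRange steps by a fixed stride: 2, 4, 6 threads.
    BENCHMARK(BM_Increment)->DenseThreadRange(2, 6, 2);
    BENCHMARK_MAIN();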
@@ -466,7 +466,7 @@ std::vector<int64_t> CreateRange(int64_t lo, int64_t hi, int multi) {

std::vector<int64_t> CreateDenseRange(int64_t start, int64_t limit,
                                      int step) {
-  CHECK_LE(start, limit);
+  BM_CHECK_LE(start, limit);
  std::vector<int64_t> args;
  for (int64_t arg = start; arg <= limit; arg += step) {
    args.push_back(arg);
@@ -14,9 +14,9 @@ namespace internal {
template <typename T>
typename std::vector<T>::iterator
AddPowers(std::vector<T>* dst, T lo, T hi, int mult) {
-  CHECK_GE(lo, 0);
-  CHECK_GE(hi, lo);
-  CHECK_GE(mult, 2);
+  BM_CHECK_GE(lo, 0);
+  BM_CHECK_GE(hi, lo);
+  BM_CHECK_GE(mult, 2);

  const size_t start_offset = dst->size();

@@ -38,10 +38,10 @@ AddPowers(std::vector<T>* dst, T lo, T hi, int mult) {
template <typename T>
void AddNegatedPowers(std::vector<T>* dst, T lo, T hi, int mult) {
  // We negate lo and hi so we require that they cannot be equal to 'min'.
-  CHECK_GT(lo, std::numeric_limits<T>::min());
-  CHECK_GT(hi, std::numeric_limits<T>::min());
-  CHECK_GE(hi, lo);
-  CHECK_LE(hi, 0);
+  BM_CHECK_GT(lo, std::numeric_limits<T>::min());
+  BM_CHECK_GT(hi, std::numeric_limits<T>::min());
+  BM_CHECK_GE(hi, lo);
+  BM_CHECK_LE(hi, 0);

  // Add positive powers, then negate and reverse.
  // Casts necessary since small integers get promoted
@@ -60,8 +60,8 @@ void AddRange(std::vector<T>* dst, T lo, T hi, int mult) {
  static_assert(std::is_integral<T>::value && std::is_signed<T>::value,
                "Args type must be a signed integer");

-  CHECK_GE(hi, lo);
-  CHECK_GE(mult, 2);
+  BM_CHECK_GE(hi, lo);
+  BM_CHECK_GE(mult, 2);

  // Add "lo"
  dst->push_back(lo);
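AddRange and its helpers produce the argument sequences behind Range(): the lower bound, the powers of mult strictly between the bounds, then the upper bound. The same expansion is exposed publicly as benchmark::CreateRange (visible as context in the hunk above); a small sketch of the expected expansion:

    #include <cstdint>
    #include <cstdio>
    #include <vector>
    #include <benchmark/benchmark.h>

    int main() {
      // Expected expansion: 8, 16, 32, 64, 128 (lo, powers of 2 between, hi).
      std::vector<int64_t> args = benchmark::CreateRange(8, 128, /*multi=*/2);
      for (int64_t a : args) std::printf("%lld\n", static_cast<long long>(a));
      return 0;
    }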
@@ -124,7 +124,7 @@ void RunInThread(const BenchmarkInstance* b, IterationCount iters,
                           : internal::ThreadTimer::Create());
  State st =
      b->Run(iters, thread_id, &timer, manager, perf_counters_measurement);
-  CHECK(st.error_occurred() || st.iterations() >= st.max_iterations)
+  BM_CHECK(st.error_occurred() || st.iterations() >= st.max_iterations)
      << "Benchmark returned before State::KeepRunning() returned false!";
  {
    MutexLock l(manager->GetBenchmarkMutex());
@@ -168,7 +168,7 @@ BenchmarkRunner::BenchmarkRunner(
            internal::ARM_DisplayReportAggregatesOnly);
    run_results.file_report_aggregates_only =
        (b.aggregation_report_mode() & internal::ARM_FileReportAggregatesOnly);
-    CHECK(FLAGS_benchmark_perf_counters.empty() ||
+    BM_CHECK(FLAGS_benchmark_perf_counters.empty() ||
             perf_counters_measurement.IsValid())
        << "Perf counters were requested but could not be set up.";
  }
@@ -25,11 +25,11 @@

namespace benchmark {

-DECLARE_double(benchmark_min_time);
-DECLARE_int32(benchmark_repetitions);
-DECLARE_bool(benchmark_report_aggregates_only);
-DECLARE_bool(benchmark_display_aggregates_only);
-DECLARE_string(benchmark_perf_counters);
+BM_DECLARE_double(benchmark_min_time);
+BM_DECLARE_int32(benchmark_repetitions);
+BM_DECLARE_bool(benchmark_report_aggregates_only);
+BM_DECLARE_bool(benchmark_display_aggregates_only);
+BM_DECLARE_string(benchmark_perf_counters);

namespace internal {
src/check.h (37 lines changed)
@@ -23,8 +23,9 @@ BENCHMARK_NORETURN inline void CallAbortHandler() {
  std::abort();  // fallback to enforce noreturn
}

-// CheckHandler is the class constructed by failing CHECK macros. CheckHandler
-// will log information about the failures and abort when it is destructed.
+// CheckHandler is the class constructed by failing BM_CHECK macros.
+// CheckHandler will log information about the failures and abort when it is
+// destructed.
class CheckHandler {
 public:
  CheckHandler(const char* check, const char* file, const char* func, int line)
@@ -51,32 +52,32 @@ class CheckHandler {
}  // end namespace internal
}  // end namespace benchmark

-// The CHECK macro returns a std::ostream object that can have extra information
-// written to it.
+// The BM_CHECK macro returns a std::ostream object that can have extra
+// information written to it.
#ifndef NDEBUG
-#define CHECK(b)                                                             \
+#define BM_CHECK(b)                                                          \
  (b ? ::benchmark::internal::GetNullLogInstance()                           \
     : ::benchmark::internal::CheckHandler(#b, __FILE__, __func__, __LINE__) \
          .GetLog())
#else
-#define CHECK(b) ::benchmark::internal::GetNullLogInstance()
+#define BM_CHECK(b) ::benchmark::internal::GetNullLogInstance()
#endif

// clang-format off
// preserve whitespacing between operators for alignment
-#define CHECK_EQ(a, b) CHECK((a) == (b))
-#define CHECK_NE(a, b) CHECK((a) != (b))
-#define CHECK_GE(a, b) CHECK((a) >= (b))
-#define CHECK_LE(a, b) CHECK((a) <= (b))
-#define CHECK_GT(a, b) CHECK((a) > (b))
-#define CHECK_LT(a, b) CHECK((a) < (b))
+#define BM_CHECK_EQ(a, b) BM_CHECK((a) == (b))
+#define BM_CHECK_NE(a, b) BM_CHECK((a) != (b))
+#define BM_CHECK_GE(a, b) BM_CHECK((a) >= (b))
+#define BM_CHECK_LE(a, b) BM_CHECK((a) <= (b))
+#define BM_CHECK_GT(a, b) BM_CHECK((a) > (b))
+#define BM_CHECK_LT(a, b) BM_CHECK((a) < (b))

-#define CHECK_FLOAT_EQ(a, b, eps) CHECK(std::fabs((a) - (b)) < (eps))
-#define CHECK_FLOAT_NE(a, b, eps) CHECK(std::fabs((a) - (b)) >= (eps))
-#define CHECK_FLOAT_GE(a, b, eps) CHECK((a) - (b) > -(eps))
-#define CHECK_FLOAT_LE(a, b, eps) CHECK((b) - (a) > -(eps))
-#define CHECK_FLOAT_GT(a, b, eps) CHECK((a) - (b) > (eps))
-#define CHECK_FLOAT_LT(a, b, eps) CHECK((b) - (a) > (eps))
+#define BM_CHECK_FLOAT_EQ(a, b, eps) BM_CHECK(std::fabs((a) - (b)) < (eps))
+#define BM_CHECK_FLOAT_NE(a, b, eps) BM_CHECK(std::fabs((a) - (b)) >= (eps))
+#define BM_CHECK_FLOAT_GE(a, b, eps) BM_CHECK((a) - (b) > -(eps))
+#define BM_CHECK_FLOAT_LE(a, b, eps) BM_CHECK((b) - (a) > -(eps))
+#define BM_CHECK_FLOAT_GT(a, b, eps) BM_CHECK((a) - (b) > (eps))
+#define BM_CHECK_FLOAT_LT(a, b, eps) BM_CHECK((b) - (a) > (eps))
//clang-format on

#endif  // CHECK_H_
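The renamed definition above is the whole trick behind the streaming syntax: on success the expression evaluates to a null log sink, and on failure a temporary CheckHandler collects everything streamed with << and aborts in its destructor at the end of the full expression. A stripped-down sketch of the same pattern (illustrative reimplementation with made-up names, not the library's code):

    #include <cstdlib>
    #include <iostream>

    struct AbortingLog {
      std::ostream& stream() { return std::cerr; }
      // Runs after every << in the full expression has executed.
      ~AbortingLog() { std::cerr << '\n'; std::abort(); }
    };

    static std::ostream& NullLog() {
      static std::ostream null_stream(nullptr);  // null streambuf: output discarded
      return null_stream;
    }

    #define MY_CHECK(b) \
      (b ? NullLog() : AbortingLog().stream() << "check failed: " #b " ")

    int main() {
      MY_CHECK(2 + 2 == 4) << "never printed";
      MY_CHECK(2 + 2 == 5) << "math is broken";  // prints, then aborts
    }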
@@ -94,7 +94,7 @@ std::string FormatString(const char* msg, va_list args) {
  va_end(args_cp);

  // currently there is no error handling for failure, so this is hack.
-  CHECK(ret >= 0);
+  BM_CHECK(ret >= 0);

  if (ret == 0)  // handle empty expansion
    return {};
@@ -105,7 +105,7 @@ std::string FormatString(const char* msg, va_list args) {
    size = (size_t)ret + 1;  // + 1 for the null byte
    std::unique_ptr<char[]> buff(new char[size]);
    ret = vsnprintf(buff.get(), size, msg, args);
-    CHECK(ret > 0 && ((size_t)ret) < size);
+    BM_CHECK(ret > 0 && ((size_t)ret) < size);
    return buff.get();
  }
}
@@ -9,23 +9,23 @@
#define FLAG(name) FLAGS_##name

// Macros for declaring flags.
-#define DECLARE_bool(name) extern bool FLAG(name)
-#define DECLARE_int32(name) extern int32_t FLAG(name)
-#define DECLARE_double(name) extern double FLAG(name)
-#define DECLARE_string(name) extern std::string FLAG(name)
-#define DECLARE_kvpairs(name) \
+#define BM_DECLARE_bool(name) extern bool FLAG(name)
+#define BM_DECLARE_int32(name) extern int32_t FLAG(name)
+#define BM_DECLARE_double(name) extern double FLAG(name)
+#define BM_DECLARE_string(name) extern std::string FLAG(name)
+#define BM_DECLARE_kvpairs(name) \
  extern std::map<std::string, std::string> FLAG(name)

// Macros for defining flags.
-#define DEFINE_bool(name, default_val) \
+#define BM_DEFINE_bool(name, default_val) \
  bool FLAG(name) = benchmark::BoolFromEnv(#name, default_val)
-#define DEFINE_int32(name, default_val) \
+#define BM_DEFINE_int32(name, default_val) \
  int32_t FLAG(name) = benchmark::Int32FromEnv(#name, default_val)
-#define DEFINE_double(name, default_val) \
+#define BM_DEFINE_double(name, default_val) \
  double FLAG(name) = benchmark::DoubleFromEnv(#name, default_val)
-#define DEFINE_string(name, default_val) \
+#define BM_DEFINE_string(name, default_val) \
  std::string FLAG(name) = benchmark::StringFromEnv(#name, default_val)
-#define DEFINE_kvpairs(name, default_val) \
+#define BM_DEFINE_kvpairs(name, default_val) \
  std::map<std::string, std::string> FLAG(name) = \
      benchmark::KvPairsFromEnv(#name, default_val)
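Each BM_DEFINE_* macro both defines the FLAGS_* global and seeds it from the environment. Hand-expanding one use makes the mechanism concrete (expansion sketch based on the macros above; the env-var naming is the library's documented behavior, stated here from memory):

    // BM_DEFINE_string(benchmark_filter, ".") expands, via FLAG(name) and the
    // # stringizer, to roughly:
    std::string FLAGS_benchmark_filter =
        benchmark::StringFromEnv("benchmark_filter", ".");
    // StringFromEnv falls back to the default unless the matching environment
    // variable is set (the flag name mapped to upper case, i.e. BENCHMARK_FILTER).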
|
@ -123,10 +123,10 @@ LeastSq MinimalLeastSq(const std::vector<int64_t>& n,
|
||||||
// fitting curve.
|
// fitting curve.
|
||||||
LeastSq MinimalLeastSq(const std::vector<int64_t>& n,
|
LeastSq MinimalLeastSq(const std::vector<int64_t>& n,
|
||||||
const std::vector<double>& time, const BigO complexity) {
|
const std::vector<double>& time, const BigO complexity) {
|
||||||
CHECK_EQ(n.size(), time.size());
|
BM_CHECK_EQ(n.size(), time.size());
|
||||||
CHECK_GE(n.size(), 2); // Do not compute fitting curve is less than two
|
BM_CHECK_GE(n.size(), 2); // Do not compute fitting curve is less than two
|
||||||
// benchmark runs are given
|
// benchmark runs are given
|
||||||
CHECK_NE(complexity, oNone);
|
BM_CHECK_NE(complexity, oNone);
|
||||||
|
|
||||||
LeastSq best_fit;
|
LeastSq best_fit;
|
||||||
|
|
||||||
|
@ -167,7 +167,8 @@ std::vector<BenchmarkReporter::Run> ComputeBigO(
|
||||||
|
|
||||||
// Populate the accumulators.
|
// Populate the accumulators.
|
||||||
for (const Run& run : reports) {
|
for (const Run& run : reports) {
|
||||||
CHECK_GT(run.complexity_n, 0) << "Did you forget to call SetComplexityN?";
|
BM_CHECK_GT(run.complexity_n, 0)
|
||||||
|
<< "Did you forget to call SetComplexityN?";
|
||||||
n.push_back(run.complexity_n);
|
n.push_back(run.complexity_n);
|
||||||
real_time.push_back(run.real_accumulated_time / run.iterations);
|
real_time.push_back(run.real_accumulated_time / run.iterations);
|
||||||
cpu_time.push_back(run.cpu_accumulated_time / run.iterations);
|
cpu_time.push_back(run.cpu_accumulated_time / run.iterations);
|
||||||
|
|
|
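The check's message points at the public hook: a benchmark must call SetComplexityN before asking for a Big-O fit. Typical usage that satisfies it (standard API sketch, not code from this commit):

    #include <vector>
    #include <benchmark/benchmark.h>

    static void BM_VectorPushBack(benchmark::State& state) {
      for (auto _ : state) {
        std::vector<int> v;
        for (int i = 0; i < state.range(0); ++i) v.push_back(i);
        benchmark::DoNotOptimize(v.data());
      }
      // Without this, ComputeBigO trips the renamed check above.
      state.SetComplexityN(state.range(0));
    }
    BENCHMARK(BM_VectorPushBack)
        ->Range(1 << 10, 1 << 16)
        ->Complexity(benchmark::oN);
    BENCHMARK_MAIN();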
@@ -85,7 +85,8 @@ void CSVReporter::ReportRuns(const std::vector<Run>& reports) {
    for (const auto& cnt : run.counters) {
      if (cnt.first == "bytes_per_second" || cnt.first == "items_per_second")
        continue;
-      CHECK(user_counter_names_.find(cnt.first) != user_counter_names_.end())
+      BM_CHECK(user_counter_names_.find(cnt.first) !=
+               user_counter_names_.end())
          << "All counters must be present in each run. "
          << "Counter named \"" << cnt.first
          << "\" was not in a run after being added to the header";
@@ -130,7 +130,7 @@ class Barrier {
  // entered the barrier. Returns iff this is the last thread to
  // enter the barrier.
  bool createBarrier(MutexLock& ml) REQUIRES(lock_) {
-    CHECK_LT(entered_, running_threads_);
+    BM_CHECK_LT(entered_, running_threads_);
    entered_++;
    if (entered_ < running_threads_) {
      // Wait for all threads to enter
@@ -42,7 +42,7 @@ namespace internal {
class PerfCounterValues {
 public:
  explicit PerfCounterValues(size_t nr_counters) : nr_counters_(nr_counters) {
-    CHECK_LE(nr_counters_, kMaxCounters);
+    BM_CHECK_LE(nr_counters_, kMaxCounters);
  }

  uint64_t operator[](size_t pos) const { return values_[kPadding + pos]; }

src/re.h (2 lines changed)
@@ -126,7 +126,7 @@ inline bool Regex::Init(const std::string& spec, std::string* error) {

    // regerror returns the number of bytes necessary to null terminate
    // the string, so we move that when assigning to error.
-    CHECK_NE(needed, 0);
+    BM_CHECK_NE(needed, 0);
    error->assign(errbuf, needed - 1);

    delete[] errbuf;
@@ -38,7 +38,7 @@ BenchmarkReporter::~BenchmarkReporter() {}

void BenchmarkReporter::PrintBasicContext(std::ostream *out,
                                          Context const &context) {
-  CHECK(out) << "cannot be null";
+  BM_CHECK(out) << "cannot be null";
  auto &Out = *out;

  Out << LocalDateTimeString() << "\n";
@@ -112,22 +112,22 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
        it = counter_stats.find(cnt.first);
        it->second.s.reserve(reports.size());
      } else {
-        CHECK_EQ(counter_stats[cnt.first].c.flags, cnt.second.flags);
+        BM_CHECK_EQ(counter_stats[cnt.first].c.flags, cnt.second.flags);
      }
    }
  }

  // Populate the accumulators.
  for (Run const& run : reports) {
-    CHECK_EQ(reports[0].benchmark_name(), run.benchmark_name());
-    CHECK_EQ(run_iterations, run.iterations);
+    BM_CHECK_EQ(reports[0].benchmark_name(), run.benchmark_name());
+    BM_CHECK_EQ(run_iterations, run.iterations);
    if (run.error_occurred) continue;
    real_accumulated_time_stat.emplace_back(run.real_accumulated_time);
    cpu_accumulated_time_stat.emplace_back(run.cpu_accumulated_time);
    // user counters
    for (auto const& cnt : run.counters) {
      auto it = counter_stats.find(cnt.first);
-      CHECK_NE(it, counter_stats.end());
+      BM_CHECK_NE(it, counter_stats.end());
      it->second.s.emplace_back(cnt.second);
    }
  }
@@ -135,7 +135,7 @@ struct ValueUnion {
  template <class T, int N>
  std::array<T, N> GetAsArray() {
    const int ArrSize = sizeof(T) * N;
-    CHECK_LE(ArrSize, Size);
+    BM_CHECK_LE(ArrSize, Size);
    std::array<T, N> Arr;
    std::memcpy(Arr.data(), data(), ArrSize);
    return Arr;
@@ -28,7 +28,7 @@ class ThreadTimer {

  // Called by each thread
  void StopTimer() {
-    CHECK(running_);
+    BM_CHECK(running_);
    running_ = false;
    real_time_used_ += ChronoClockNow() - start_real_time_;
    // Floating point error can result in the subtraction producing a negative
@@ -44,19 +44,19 @@ class ThreadTimer {

  // REQUIRES: timer is not running
  double real_time_used() const {
-    CHECK(!running_);
+    BM_CHECK(!running_);
    return real_time_used_;
  }

  // REQUIRES: timer is not running
  double cpu_time_used() const {
-    CHECK(!running_);
+    BM_CHECK(!running_);
    return cpu_time_used_;
  }

  // REQUIRES: timer is not running
  double manual_time_used() const {
-    CHECK(!running_);
+    BM_CHECK(!running_);
    return manual_time_used_;
  }
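ThreadTimer's accessors assert the timer is stopped before being read; manual_time_used() backs the manual-timing mode, which on the public side looks like this (standard API sketch, not code from this commit):

    #include <chrono>
    #include <benchmark/benchmark.h>

    static void BM_ManualTiming(benchmark::State& state) {
      for (auto _ : state) {
        auto start = std::chrono::high_resolution_clock::now();
        benchmark::ClobberMemory();  // stand-in for the work being timed
        auto end = std::chrono::high_resolution_clock::now();
        state.SetIterationTime(
            std::chrono::duration<double>(end - start).count());
      }
    }
    // UseManualTime() reports the durations fed to SetIterationTime().
    BENCHMARK(BM_ManualTiming)->UseManualTime();
    BENCHMARK_MAIN();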
@@ -225,7 +225,7 @@ std::string LocalDateTimeString() {

    tz_len = ::snprintf(tz_offset, sizeof(tz_offset), "%c%02li:%02li",
                        tz_offset_sign, offset_minutes / 100, offset_minutes % 100);
-    CHECK(tz_len == kTzOffsetLen);
+    BM_CHECK(tz_len == kTzOffsetLen);
    ((void)tz_len);  // Prevent unused variable warning in optimized build.
  } else {
    // Unknown offset. RFC3339 specifies that unknown local offsets should be
@@ -242,7 +242,7 @@ std::string LocalDateTimeString() {

  timestamp_len = std::strftime(storage, sizeof(storage), "%Y-%m-%dT%H:%M:%S",
                                timeinfo_p);
-  CHECK(timestamp_len == kTimestampLen);
+  BM_CHECK(timestamp_len == kTimestampLen);
  // Prevent unused variable warning in optimized build.
  ((void)kTimestampLen);
@@ -10,9 +10,9 @@

namespace benchmark {

-DECLARE_bool(benchmark_enable_random_interleaving);
-DECLARE_string(benchmark_filter);
-DECLARE_int32(benchmark_repetitions);
+BM_DECLARE_bool(benchmark_enable_random_interleaving);
+BM_DECLARE_string(benchmark_filter);
+BM_DECLARE_int32(benchmark_repetitions);

namespace internal {
namespace {
@@ -143,12 +143,12 @@ struct Results {
template <class T>
T Results::GetAs(const char* entry_name) const {
  auto* sv = Get(entry_name);
-  CHECK(sv != nullptr && !sv->empty());
+  BM_CHECK(sv != nullptr && !sv->empty());
  std::stringstream ss;
  ss << *sv;
  T out;
  ss >> out;
-  CHECK(!ss.fail());
+  BM_CHECK(!ss.fail());
  return out;
}

@@ -159,7 +159,7 @@ T Results::GetAs(const char* entry_name) const {
// clang-format off

#define CHECK_RESULT_VALUE_IMPL(entry, getfn, var_type, var_name, relationship, value) \
-    CONCAT(CHECK_, relationship) \
+    CONCAT(BM_CHECK_, relationship) \
    (entry.getfn< var_type >(var_name), (value)) << "\n" \
    << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
    << __FILE__ << ":" << __LINE__ << ": " \
@@ -170,7 +170,7 @@ T Results::GetAs(const char* entry_name) const {
// check with tolerance. eps_factor is the tolerance window, which is
// interpreted relative to value (eg, 0.1 means 10% of value).
#define CHECK_FLOAT_RESULT_VALUE_IMPL(entry, getfn, var_type, var_name, relationship, value, eps_factor) \
-    CONCAT(CHECK_FLOAT_, relationship) \
+    CONCAT(BM_CHECK_FLOAT_, relationship) \
    (entry.getfn< var_type >(var_name), (value), (eps_factor) * (value)) << "\n" \
    << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
    << __FILE__ << ":" << __LINE__ << ": " \
@@ -94,27 +94,27 @@ void CheckCase(std::stringstream& remaining_output, TestCase const& TC,
  bool on_first = true;
  std::string line;
  while (remaining_output.eof() == false) {
-    CHECK(remaining_output.good());
+    BM_CHECK(remaining_output.good());
    std::getline(remaining_output, line);
    if (on_first) {
      first_line = line;
      on_first = false;
    }
    for (const auto& NC : not_checks) {
-      CHECK(!NC.regex->Match(line))
+      BM_CHECK(!NC.regex->Match(line))
          << "Unexpected match for line \"" << line << "\" for MR_Not regex \""
          << NC.regex_str << "\""
          << "\n actual regex string \"" << TC.substituted_regex << "\""
          << "\n started matching near: " << first_line;
    }
    if (TC.regex->Match(line)) return;
-    CHECK(TC.match_rule != MR_Next)
+    BM_CHECK(TC.match_rule != MR_Next)
        << "Expected line \"" << line << "\" to match regex \"" << TC.regex_str
        << "\""
        << "\n actual regex string \"" << TC.substituted_regex << "\""
        << "\n started matching near: " << first_line;
  }
-  CHECK(remaining_output.eof() == false)
+  BM_CHECK(remaining_output.eof() == false)
      << "End of output reached before match for regex \"" << TC.regex_str
      << "\" was found"
      << "\n actual regex string \"" << TC.substituted_regex << "\""
@@ -144,7 +144,7 @@ class TestReporter : public benchmark::BenchmarkReporter {
    bool first = true;
    for (auto rep : reporters_) {
      bool new_ret = rep->ReportContext(context);
-      CHECK(first || new_ret == last_ret)
+      BM_CHECK(first || new_ret == last_ret)
          << "Reports return different values for ReportContext";
      first = false;
      last_ret = new_ret;
@@ -226,7 +226,7 @@ void ResultsChecker::CheckResults(std::stringstream& output) {
  std::string line;
  bool on_first = true;
  while (output.eof() == false) {
-    CHECK(output.good());
+    BM_CHECK(output.good());
    std::getline(output, line);
    if (on_first) {
      SetHeader_(line);  // this is important
@@ -261,9 +261,9 @@ void ResultsChecker::SetHeader_(const std::string& csv_header) {
// set the values for a benchmark
void ResultsChecker::SetValues_(const std::string& entry_csv_line) {
  if (entry_csv_line.empty()) return;  // some lines are empty
-  CHECK(!field_names.empty());
+  BM_CHECK(!field_names.empty());
  auto vals = SplitCsv_(entry_csv_line);
-  CHECK_EQ(vals.size(), field_names.size());
+  BM_CHECK_EQ(vals.size(), field_names.size());
  results.emplace_back(vals[0]);  // vals[0] is the benchmark name
  auto& entry = results.back();
  for (size_t i = 1, e = vals.size(); i < e; ++i) {
@@ -278,7 +278,7 @@ std::vector<std::string> ResultsChecker::SplitCsv_(const std::string& line) {
  if (!field_names.empty()) out.reserve(field_names.size());
  size_t prev = 0, pos = line.find_first_of(','), curr = pos;
  while (pos != line.npos) {
-    CHECK(curr > 0);
+    BM_CHECK(curr > 0);
    if (line[prev] == '"') ++prev;
    if (line[curr - 1] == '"') --curr;
    out.push_back(line.substr(prev, curr - prev));
@@ -309,7 +309,7 @@ int Results::NumThreads() const {
  ss << name.substr(pos + 9, end);
  int num = 1;
  ss >> num;
-  CHECK(!ss.fail());
+  BM_CHECK(!ss.fail());
  return num;
}

@@ -318,11 +318,11 @@ double Results::NumIterations() const {
}

double Results::GetTime(BenchmarkTime which) const {
-  CHECK(which == kCpuTime || which == kRealTime);
+  BM_CHECK(which == kCpuTime || which == kRealTime);
  const char* which_str = which == kCpuTime ? "cpu_time" : "real_time";
  double val = GetAs<double>(which_str);
  auto unit = Get("time_unit");
-  CHECK(unit);
+  BM_CHECK(unit);
  if (*unit == "ns") {
    return val * 1.e-9;
  } else if (*unit == "us") {
@@ -332,7 +332,7 @@ double Results::GetTime(BenchmarkTime which) const {
  } else if (*unit == "s") {
    return val;
  } else {
-    CHECK(1 == 0) << "unknown time unit: " << *unit;
+    BM_CHECK(1 == 0) << "unknown time unit: " << *unit;
    return 0;
  }
}
@@ -348,8 +348,8 @@ TestCase::TestCase(std::string re, int rule)
      regex(std::make_shared<benchmark::Regex>()) {
  std::string err_str;
  regex->Init(substituted_regex, &err_str);
-  CHECK(err_str.empty()) << "Could not construct regex \"" << substituted_regex
-                         << "\""
+  BM_CHECK(err_str.empty())
+      << "Could not construct regex \"" << substituted_regex << "\""
      << "\n originally \"" << regex_str << "\""
      << "\n got error: " << err_str;
}
@@ -438,7 +438,7 @@ void RunOutputTests(int argc, char* argv[]) {
  // the checks to subscribees.
  auto& csv = TestCases[2];
  // would use == but gcc spits a warning
-  CHECK(std::strcmp(csv.name, "CSVReporter") == 0);
+  BM_CHECK(std::strcmp(csv.name, "CSVReporter") == 0);
  internal::GetResultsChecker().CheckResults(csv.out_stream);
}

@@ -103,10 +103,10 @@ size_t do_work() {

void measure(size_t threadcount, PerfCounterValues* values1,
             PerfCounterValues* values2) {
-  CHECK_NE(values1, nullptr);
-  CHECK_NE(values2, nullptr);
+  BM_CHECK_NE(values1, nullptr);
+  BM_CHECK_NE(values2, nullptr);
  std::vector<std::thread> threads(threadcount);
-  auto work = [&]() { CHECK(do_work() > 1000); };
+  auto work = [&]() { BM_CHECK(do_work() > 1000); };

  // We need to first set up the counters, then start the threads, so the
  // threads would inherit the counters. But later, we need to first destroy the
@@ -30,13 +30,13 @@ struct TestCase {

  void CheckRun(Run const& run) const {
    // clang-format off
-    CHECK(name == run.benchmark_name()) << "expected " << name << " got "
+    BM_CHECK(name == run.benchmark_name()) << "expected " << name << " got "
                                        << run.benchmark_name();
    if (label) {
-      CHECK(run.report_label == label) << "expected " << label << " got "
+      BM_CHECK(run.report_label == label) << "expected " << label << " got "
                                       << run.report_label;
    } else {
-      CHECK(run.report_label == "");
+      BM_CHECK(run.report_label == "");
    }
    // clang-format on
  }
@@ -33,14 +33,14 @@ struct TestCase {
  typedef benchmark::BenchmarkReporter::Run Run;

  void CheckRun(Run const& run) const {
-    CHECK(name == run.benchmark_name())
+    BM_CHECK(name == run.benchmark_name())
        << "expected " << name << " got " << run.benchmark_name();
-    CHECK(error_occurred == run.error_occurred);
-    CHECK(error_message == run.error_message);
+    BM_CHECK(error_occurred == run.error_occurred);
+    BM_CHECK(error_message == run.error_message);
    if (error_occurred) {
-      // CHECK(run.iterations == 0);
+      // BM_CHECK(run.iterations == 0);
    } else {
-      CHECK(run.iterations != 0);
+      BM_CHECK(run.iterations != 0);
    }
  }
};