mirror of https://github.com/google/benchmark.git
(clang-)format all the things (#610)
* format all documents according to contributor guidelines and specifications; use clang-format on/off to stop formatting when it makes excessively poor decisions
* format all tests as well, and mark blocks which change too much
This commit is contained in:
parent 4fbfa2f336
commit 4c2af07889
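For context on the mechanism this commit leans on: clang-format leaves everything between a `// clang-format off` and a `// clang-format on` comment untouched, which is how manually aligned blocks (for example the CHECK_* macro table in src/check.h below) survive a tree-wide reformat, e.g. an in-place run like `clang-format -i` over the sources. A minimal sketch of the idiom follows; the ASSERT_* macros are illustrative stand-ins, not code from this repository:

#include <cassert>

// clang-format off
// preserve the hand-aligned operator column; clang-format would
// otherwise collapse the extra spaces inside the macro bodies
#define ASSERT_EQ(a, b) assert((a) == (b))
#define ASSERT_NE(a, b) assert((a) != (b))
#define ASSERT_GE(a, b) assert((a) >= (b))
// clang-format on

int main() {
  // Code outside the markers is formatted normally again.
  ASSERT_EQ(2 + 2, 4);
  return 0;
}

The pairing matters: an unmatched `off` disables formatting for the rest of the file, which is why blocks that would "change too much" are bracketed with both markers rather than reflowed.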
@@ -115,8 +115,7 @@ namespace {
 BenchmarkReporter::Run CreateRunReport(
     const benchmark::internal::Benchmark::Instance& b,
-    const internal::ThreadManager::Result& results,
-    double seconds) {
+    const internal::ThreadManager::Result& results, double seconds) {
   // Create report about this benchmark run.
   BenchmarkReporter::Run report;

@@ -234,6 +233,8 @@ std::vector<BenchmarkReporter::Run> RunBenchmark(
   const double min_time =
       !IsZero(b.min_time) ? b.min_time : FLAGS_benchmark_min_time;

+  // clang-format off
+  // turn off clang-format since it mangles prettiness here
   // Determine if this run should be reported; Either it has
   // run for a sufficient amount of time or because an error was reported.
   const bool should_report = repetition_num > 0

@@ -245,6 +246,7 @@ std::vector<BenchmarkReporter::Run> RunBenchmark(
   // minimum time. Note that user provided timers are except from this
   // sanity check.
       || ((results.real_time_used >= 5 * min_time) && !b.use_manual_time);
+  // clang-format on

   if (should_report) {
     BenchmarkReporter::Run report = CreateRunReport(b, results, seconds);

@@ -324,7 +326,8 @@ State::State(size_t max_iters, const std::vector<int64_t>& ranges, int thread_i,
 // Offset tests to ensure commonly accessed data is on the first cache line.
 const int cache_line_size = 64;
 static_assert(offsetof(State, error_occurred_) <=
-              (cache_line_size - sizeof(error_occurred_)), "");
+                  (cache_line_size - sizeof(error_occurred_)),
+              "");
 #ifdef __GNUC__
 #pragma GCC diagnostic pop
 #endif

@@ -387,8 +390,8 @@ namespace internal {
 namespace {

 void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
-                       BenchmarkReporter* console_reporter,
-                       BenchmarkReporter* file_reporter) {
+                   BenchmarkReporter* console_reporter,
+                   BenchmarkReporter* file_reporter) {
   // Note the file_reporter can be null.
   CHECK(console_reporter != nullptr);

@@ -401,7 +404,7 @@ void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
         std::max<size_t>(name_field_width, benchmark.name.size());
     has_repetitions |= benchmark.repetitions > 1;

-    for(const auto& Stat : *benchmark.statistics)
+    for (const auto& Stat : *benchmark.statistics)
       stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
   }
   if (has_repetitions) name_field_width += 1 + stat_field_width;

@@ -469,15 +472,15 @@ ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
   } else {
     output_opts &= ~ConsoleReporter::OO_Color;
   }
-  if(force_no_color) {
+  if (force_no_color) {
     output_opts &= ~ConsoleReporter::OO_Color;
   }
-  if(FLAGS_benchmark_counters_tabular) {
+  if (FLAGS_benchmark_counters_tabular) {
    output_opts |= ConsoleReporter::OO_Tabular;
   } else {
     output_opts &= ~ConsoleReporter::OO_Tabular;
   }
-  return static_cast< ConsoleReporter::OutputOptions >(output_opts);
+  return static_cast<ConsoleReporter::OutputOptions>(output_opts);
 }

 }  // end namespace internal

@@ -502,7 +505,7 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
   std::unique_ptr<BenchmarkReporter> default_file_reporter;
   if (!console_reporter) {
     default_console_reporter = internal::CreateReporter(
-          FLAGS_benchmark_format, internal::GetOutputOptions());
+        FLAGS_benchmark_format, internal::GetOutputOptions());
     console_reporter = default_console_reporter.get();
   }
   auto& Out = console_reporter->GetOutputStream();

@@ -589,7 +592,7 @@ void ParseCommandLineFlags(int* argc, char** argv) {
         // TODO: Remove this.
         ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) ||
         ParseBoolFlag(argv[i], "benchmark_counters_tabular",
-                       &FLAGS_benchmark_counters_tabular) ||
+                      &FLAGS_benchmark_counters_tabular) ||
         ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
       for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];

@@ -623,7 +626,8 @@ void Initialize(int* argc, char** argv) {

 bool ReportUnrecognizedArguments(int argc, char** argv) {
   for (int i = 1; i < argc; ++i) {
-    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0], argv[i]);
+    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
+            argv[i]);
   }
   return argc > 1;
 }

@@ -115,9 +115,9 @@ bool BenchmarkFamilies::FindBenchmarks(
   std::string error_msg;
   Regex re;
   bool isNegativeFilter = false;
-  if(spec[0] == '-') {
-    spec.replace(0, 1, "");
-    isNegativeFilter = true;
+  if (spec[0] == '-') {
+    spec.replace(0, 1, "");
+    isNegativeFilter = true;
   }
   if (!re.Init(spec, &error_msg)) {
     Err << "Could not compile benchmark re: " << error_msg << std::endl;

@@ -1,9 +1,9 @@
 #ifndef CHECK_H_
 #define CHECK_H_

+#include <cmath>
 #include <cstdlib>
 #include <ostream>
-#include <cmath>

 #include "internal_macros.h"
 #include "log.h"

@@ -62,6 +62,8 @@ class CheckHandler {
 #define CHECK(b) ::benchmark::internal::GetNullLogInstance()
 #endif

+// clang-format off
+// preserve whitespacing between operators for alignment
 #define CHECK_EQ(a, b) CHECK((a) == (b))
 #define CHECK_NE(a, b) CHECK((a) != (b))
 #define CHECK_GE(a, b) CHECK((a) >= (b))

@@ -75,5 +77,6 @@ class CheckHandler {
 #define CHECK_FLOAT_LE(a, b, eps) CHECK((b) - (a) > -(eps))
 #define CHECK_FLOAT_GT(a, b, eps) CHECK((a) - (b) > (eps))
 #define CHECK_FLOAT_LT(a, b, eps) CHECK((b) - (a) > (eps))
+//clang-format on

 #endif  // CHECK_H_

@@ -45,7 +45,7 @@ bool ParseInt32(const std::string& src_text, const char* str, int32_t* value) {
       // LONG_MAX or LONG_MIN when the input overflows.)
       result != long_value
       // The parsed value overflows as an Int32.
-      ) {
+  ) {
     std::cerr << src_text << " is expected to be a 32-bit integer, "
               << "but actually has value \"" << str << "\", "
               << "which overflows.\n";

@@ -28,22 +28,22 @@ double Finish(Counter const& c, double cpu_time, double num_threads) {
   return v;
 }

-void Finish(UserCounters *l, double cpu_time, double num_threads) {
-  for (auto &c : *l) {
+void Finish(UserCounters* l, double cpu_time, double num_threads) {
+  for (auto& c : *l) {
     c.second.value = Finish(c.second, cpu_time, num_threads);
   }
 }

-void Increment(UserCounters *l, UserCounters const& r) {
+void Increment(UserCounters* l, UserCounters const& r) {
   // add counters present in both or just in *l
-  for (auto &c : *l) {
+  for (auto& c : *l) {
     auto it = r.find(c.first);
     if (it != r.end()) {
       c.second.value = c.second + it->second;
     }
   }
   // add counters present in r, but not in *l
-  for (auto const &tc : r) {
+  for (auto const& tc : r) {
     auto it = l->find(tc.first);
     if (it == l->end()) {
       (*l)[tc.first] = tc.second;

@@ -64,5 +64,5 @@ bool SameNames(UserCounters const& l, UserCounters const& r) {
   return true;
 }

-} // end namespace internal
-} // end namespace benchmark
+}  // end namespace internal
+}  // end namespace benchmark

@@ -18,9 +18,9 @@ namespace benchmark {

 // these counter-related functions are hidden to reduce API surface.
 namespace internal {
-void Finish(UserCounters *l, double time, double num_threads);
-void Increment(UserCounters *l, UserCounters const& r);
+void Finish(UserCounters* l, double time, double num_threads);
+void Increment(UserCounters* l, UserCounters const& r);
 bool SameNames(UserCounters const& l, UserCounters const& r);
-} // end namespace internal
+}  // end namespace internal

-} //end namespace benchmark
+}  // end namespace benchmark

@@ -22,9 +22,9 @@
 #include <tuple>
 #include <vector>

+#include "check.h"
 #include "string_util.h"
 #include "timers.h"
-#include "check.h"

 // File format reference: http://edoceo.com/utilitas/csv-file-format.

@@ -42,7 +42,7 @@ bool CSVReporter::ReportContext(const Context& context) {
   return true;
 }

-void CSVReporter::ReportRuns(const std::vector<Run> & reports) {
+void CSVReporter::ReportRuns(const std::vector<Run>& reports) {
   std::ostream& Out = GetOutputStream();

   if (!printed_header_) {

@@ -58,7 +58,8 @@ void CSVReporter::ReportRuns(const std::vector<Run> & reports) {
       Out << *B++;
       if (B != elements.end()) Out << ",";
     }
-    for (auto B = user_counter_names_.begin(); B != user_counter_names_.end();) {
+    for (auto B = user_counter_names_.begin();
+         B != user_counter_names_.end();) {
       Out << ",\"" << *B++ << "\"";
     }
     Out << "\n";

@@ -69,9 +70,9 @@ void CSVReporter::ReportRuns(const std::vector<Run> & reports) {
     for (const auto& run : reports) {
       for (const auto& cnt : run.counters) {
         CHECK(user_counter_names_.find(cnt.first) != user_counter_names_.end())
-          << "All counters must be present in each run. "
-          << "Counter named \"" << cnt.first
-          << "\" was not in a run after being added to the header";
+            << "All counters must be present in each run. "
+            << "Counter named \"" << cnt.first
+            << "\" was not in a run after being added to the header";
       }
     }
   }

@@ -80,10 +81,9 @@ void CSVReporter::ReportRuns(const std::vector<Run> & reports) {
   for (const auto& run : reports) {
     PrintRunData(run);
   }
-
 }

-void CSVReporter::PrintRunData(const Run & run) {
+void CSVReporter::PrintRunData(const Run& run) {
   std::ostream& Out = GetOutputStream();

   // Field with embedded double-quote characters must be doubled and the field

@@ -135,9 +135,9 @@ void CSVReporter::PrintRunData(const Run & run) {
   Out << ",,";  // for error_occurred and error_message

   // Print user counters
-  for (const auto &ucn : user_counter_names_) {
+  for (const auto& ucn : user_counter_names_) {
     auto it = run.counters.find(ucn);
-    if(it == run.counters.end()) {
+    if (it == run.counters.end()) {
       Out << ",";
     } else {
       Out << "," << it->second;

@@ -121,7 +121,7 @@ inline BENCHMARK_ALWAYS_INLINE int64_t Now() {
   // because is provides nanosecond resolution (which is noticable at
   // least for PNaCl modules running on x86 Mac & Linux).
   // Initialize to always return 0 if clock_gettime fails.
-  struct timespec ts = { 0, 0 };
+  struct timespec ts = {0, 0};
   clock_gettime(CLOCK_MONOTONIC, &ts);
   return static_cast<int64_t>(ts.tv_sec) * 1000000000 + ts.tv_nsec;
 #elif defined(__aarch64__)

@@ -159,10 +159,10 @@ inline BENCHMARK_ALWAYS_INLINE int64_t Now() {
   struct timeval tv;
   gettimeofday(&tv, nullptr);
   return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
-#elif defined(__s390__) // Covers both s390 and s390x.
+#elif defined(__s390__)  // Covers both s390 and s390x.
   // Return the CPU clock.
   uint64_t tsc;
-  asm("stck %0" : "=Q" (tsc) : : "cc");
+  asm("stck %0" : "=Q"(tsc) : : "cc");
   return tsc;
 #else
   // The soft failover to a generic implementation is automatic only for ARM.

@@ -3,6 +3,8 @@

 #include "benchmark/benchmark.h"

+// clang-format off
+
 #ifndef __has_feature
 #define __has_feature(x) 0
 #endif

@@ -86,4 +88,6 @@
 #define BENCHMARK_UNREACHABLE() ((void)0)
 #endif

+// clang-format on
+
 #endif  // BENCHMARK_INTERNAL_MACROS_H_

@@ -17,12 +17,12 @@

 #include <algorithm>
 #include <cstdint>
+#include <iomanip>  // for setprecision
 #include <iostream>
+#include <limits>
 #include <string>
 #include <tuple>
 #include <vector>
-#include <iomanip> // for setprecision
-#include <limits>

 #include "string_util.h"
 #include "timers.h"

@@ -53,7 +53,7 @@ std::string FormatKV(std::string const& key, double value) {
   std::stringstream ss;
   ss << '"' << key << "\": ";

-  const auto max_digits10 = std::numeric_limits<decltype (value)>::max_digits10;
+  const auto max_digits10 = std::numeric_limits<decltype(value)>::max_digits10;
   const auto max_fractional_digits10 = max_digits10 - 1;

   ss << std::scientific << std::setprecision(max_fractional_digits10) << value;

@@ -161,40 +161,30 @@ void JSONReporter::PrintRunData(Run const& run) {
   }
   if (!run.report_big_o && !run.report_rms) {
     out << indent << FormatKV("iterations", run.iterations) << ",\n";
-    out << indent
-        << FormatKV("real_time", run.GetAdjustedRealTime())
-        << ",\n";
-    out << indent
-        << FormatKV("cpu_time", run.GetAdjustedCPUTime());
+    out << indent << FormatKV("real_time", run.GetAdjustedRealTime()) << ",\n";
+    out << indent << FormatKV("cpu_time", run.GetAdjustedCPUTime());
     out << ",\n"
         << indent << FormatKV("time_unit", GetTimeUnitString(run.time_unit));
   } else if (run.report_big_o) {
-    out << indent
-        << FormatKV("cpu_coefficient", run.GetAdjustedCPUTime())
+    out << indent << FormatKV("cpu_coefficient", run.GetAdjustedCPUTime())
         << ",\n";
-    out << indent
-        << FormatKV("real_coefficient", run.GetAdjustedRealTime())
+    out << indent << FormatKV("real_coefficient", run.GetAdjustedRealTime())
         << ",\n";
     out << indent << FormatKV("big_o", GetBigOString(run.complexity)) << ",\n";
     out << indent << FormatKV("time_unit", GetTimeUnitString(run.time_unit));
   } else if (run.report_rms) {
-    out << indent
-        << FormatKV("rms", run.GetAdjustedCPUTime());
+    out << indent << FormatKV("rms", run.GetAdjustedCPUTime());
   }
   if (run.bytes_per_second > 0.0) {
     out << ",\n"
-        << indent
-        << FormatKV("bytes_per_second", run.bytes_per_second);
+        << indent << FormatKV("bytes_per_second", run.bytes_per_second);
   }
   if (run.items_per_second > 0.0) {
     out << ",\n"
-        << indent
-        << FormatKV("items_per_second", run.items_per_second);
+        << indent << FormatKV("items_per_second", run.items_per_second);
   }
-  for(auto &c : run.counters) {
-    out << ",\n"
-        << indent
-        << FormatKV(c.first, c.second);
+  for (auto& c : run.counters) {
+    out << ",\n" << indent << FormatKV(c.first, c.second);
   }
   if (!run.report_label.empty()) {
     out << ",\n" << indent << FormatKV("label", run.report_label);

@@ -202,4 +192,4 @@ void JSONReporter::PrintRunData(Run const& run) {
   out << '\n';
 }

-} // end namespace benchmark
+}  // end namespace benchmark

@@ -66,8 +66,9 @@ inline LogType& GetLogInstanceForLevel(int level) {
 }  // end namespace internal
 }  // end namespace benchmark

+// clang-format off
 #define VLOG(x) \
   (::benchmark::internal::GetLogInstanceForLevel(x) << "-- LOG(" << x << "):" \
                                                        " ")

+// clang-format on
 #endif

src/re.h (24 lines changed)
@@ -17,6 +17,8 @@

 #include "internal_macros.h"

+// clang-format off
+
 #if !defined(HAVE_STD_REGEX) && \
     !defined(HAVE_GNU_POSIX_REGEX) && \
     !defined(HAVE_POSIX_REGEX)

@@ -45,6 +47,9 @@
 #else
 #error No regular expression backend was found!
 #endif

+// clang-format on
+
 #include <string>

 #include "check.h"

@@ -76,7 +81,7 @@ class Regex {
 #elif defined(HAVE_POSIX_REGEX) || defined(HAVE_GNU_POSIX_REGEX)
   regex_t re_;
 #else
-  #error No regular expression backend implementation available
+#error No regular expression backend implementation available
 #endif
 };

@@ -84,20 +89,21 @@ class Regex {

 inline bool Regex::Init(const std::string& spec, std::string* error) {
 #ifdef BENCHMARK_HAS_NO_EXCEPTIONS
-  ((void)error); // suppress unused warning
+  ((void)error);  // suppress unused warning
 #else
   try {
 #endif
-  re_ = std::regex(spec, std::regex_constants::extended);
-  init_ = true;
+    re_ = std::regex(spec, std::regex_constants::extended);
+    init_ = true;
 #ifndef BENCHMARK_HAS_NO_EXCEPTIONS
-  }
-  catch (const std::regex_error& e) {
-    if (error) {
-      *error = e.what();
-    }
-  }
+  } catch (const std::regex_error& e) {
+    if (error) {
+      *error = e.what();
+    }
+  }
 #endif
-    return init_;
+  return init_;
 }

 inline Regex::~Regex() {}

@@ -68,7 +68,7 @@ void BenchmarkReporter::PrintBasicContext(std::ostream *out,
 }

 // No initializer because it's already initialized to NULL.
-const char* BenchmarkReporter::Context::executable_name;
+const char *BenchmarkReporter::Context::executable_name;

 BenchmarkReporter::Context::Context() : cpu_info(CPUInfo::Get()) {}

@@ -17,9 +17,9 @@

 #include <algorithm>
 #include <cmath>
+#include <numeric>
 #include <string>
 #include <vector>
-#include <numeric>
 #include "check.h"
 #include "statistics.h"

@@ -43,9 +43,9 @@ double StatisticsMedian(const std::vector<double>& v) {

   // did we have an odd number of samples?
   // if yes, then center is the median
-  // it no, then we are looking for the average between center and the value before
-  if(v.size() % 2 == 1)
-    return *center;
+  // it no, then we are looking for the average between center and the value
+  // before
+  if (v.size() % 2 == 1) return *center;
   auto center2 = copy.begin() + v.size() / 2 - 1;
   std::nth_element(copy.begin(), center2, copy.end());
   return (*center + *center2) / 2.0;

@@ -68,8 +68,7 @@ double StatisticsStdDev(const std::vector<double>& v) {
   if (v.empty()) return mean;

   // Sample standard deviation is undefined for n = 1
-  if (v.size() == 1)
-    return 0.0;
+  if (v.size() == 1) return 0.0;

   const double avg_squares = SumSquares(v) * (1.0 / v.size());
   return Sqrt(v.size() / (v.size() - 1.0) * (avg_squares - Sqr(mean)));

@@ -108,11 +107,11 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
     Counter c;
     std::vector<double> s;
   };
-  std::map< std::string, CounterStat > counter_stats;
-  for(Run const& r : reports) {
-    for(auto const& cnt : r.counters) {
+  std::map<std::string, CounterStat> counter_stats;
+  for (Run const& r : reports) {
+    for (auto const& cnt : r.counters) {
       auto it = counter_stats.find(cnt.first);
-      if(it == counter_stats.end()) {
+      if (it == counter_stats.end()) {
         counter_stats.insert({cnt.first, {cnt.second, std::vector<double>{}}});
         it = counter_stats.find(cnt.first);
         it->second.s.reserve(reports.size());

@@ -132,7 +131,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
     items_per_second_stat.emplace_back(run.items_per_second);
     bytes_per_second_stat.emplace_back(run.bytes_per_second);
     // user counters
-    for(auto const& cnt : run.counters) {
+    for (auto const& cnt : run.counters) {
       auto it = counter_stats.find(cnt.first);
       CHECK_NE(it, counter_stats.end());
       it->second.s.emplace_back(cnt.second);

@@ -148,7 +147,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
     }
   }

-  for(const auto& Stat : *reports[0].statistics) {
+  for (const auto& Stat : *reports[0].statistics) {
     // Get the data from the accumulator to BenchmarkReporter::Run's.
     Run data;
     data.benchmark_name = reports[0].benchmark_name + "_" + Stat.name_;

@@ -163,7 +162,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
     data.time_unit = reports[0].time_unit;

     // user counters
-    for(auto const& kv : counter_stats) {
+    for (auto const& kv : counter_stats) {
       const auto uc_stat = Stat.compute_(kv.second.s);
       auto c = Counter(uc_stat, counter_stats[kv.first].c.flags);
       data.counters[kv.first] = c;

@@ -19,8 +19,7 @@ inline std::ostream& StrCatImp(std::ostream& out) BENCHMARK_NOEXCEPT {
 }

 template <class First, class... Rest>
-inline std::ostream& StrCatImp(std::ostream& out, First&& f,
-                               Rest&&... rest) {
+inline std::ostream& StrCatImp(std::ostream& out, First&& f, Rest&&... rest) {
   out << std::forward<First>(f);
   return StrCatImp(out, std::forward<Rest>(rest)...);
 }

@@ -2,13 +2,13 @@
 #define TEST_OUTPUT_TEST_H

 #undef NDEBUG
+#include <functional>
 #include <initializer_list>
 #include <memory>
+#include <sstream>
 #include <string>
 #include <utility>
 #include <vector>
-#include <functional>
-#include <sstream>

 #include "../src/re.h"
 #include "benchmark/benchmark.h"

@@ -73,21 +73,20 @@ void RunOutputTests(int argc, char* argv[]);
 // will be the subject of a call to checker_function
 // checker_function: should be of type ResultsCheckFn (see below)
 #define CHECK_BENCHMARK_RESULTS(bm_name_pattern, checker_function) \
-    size_t CONCAT(dummy, __LINE__) = AddChecker(bm_name_pattern, checker_function)
+  size_t CONCAT(dummy, __LINE__) = AddChecker(bm_name_pattern, checker_function)

 struct Results;
-typedef std::function< void(Results const&) > ResultsCheckFn;
+typedef std::function<void(Results const&)> ResultsCheckFn;

 size_t AddChecker(const char* bm_name_pattern, ResultsCheckFn fn);

 // Class holding the results of a benchmark.
 // It is passed in calls to checker functions.
 struct Results {
-
   // the benchmark name
   std::string name;
   // the benchmark fields
-  std::map< std::string, std::string > values;
+  std::map<std::string, std::string> values;

   Results(const std::string& n) : name(n) {}

@@ -102,18 +101,18 @@ struct Results {
   // it is better to use fuzzy float checks for this, as the float
   // ASCII formatting is lossy.
   double DurationRealTime() const {
-    return GetAs< double >("iterations") * GetTime(kRealTime);
+    return GetAs<double>("iterations") * GetTime(kRealTime);
   }
   // get the cpu_time duration of the benchmark in seconds
   double DurationCPUTime() const {
-    return GetAs< double >("iterations") * GetTime(kCpuTime);
+    return GetAs<double>("iterations") * GetTime(kCpuTime);
   }

   // get the string for a result by name, or nullptr if the name
   // is not found
   const std::string* Get(const char* entry_name) const {
     auto it = values.find(entry_name);
-    if(it == values.end()) return nullptr;
+    if (it == values.end()) return nullptr;
     return &it->second;
   }

@@ -126,15 +125,15 @@ struct Results {
   // as a double, and only then converted to the asked type.
   template <class T>
   T GetCounterAs(const char* entry_name) const {
-    double dval = GetAs< double >(entry_name);
-    T tval = static_cast< T >(dval);
+    double dval = GetAs<double>(entry_name);
+    T tval = static_cast<T>(dval);
     return tval;
   }
 };

 template <class T>
 T Results::GetAs(const char* entry_name) const {
-  auto *sv = Get(entry_name);
+  auto* sv = Get(entry_name);
   CHECK(sv != nullptr && !sv->empty());
   std::stringstream ss;
   ss << *sv;

@@ -148,6 +147,8 @@ T Results::GetAs(const char* entry_name) const {
 // Macros to help in result checking. Do not use them with arguments causing
 // side-effects.

+// clang-format off
+
 #define _CHECK_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value) \
     CONCAT(CHECK_, relationship) \
     (entry.getfn< var_type >(var_name), (value)) << "\n" \

@@ -188,6 +189,8 @@ T Results::GetAs(const char* entry_name) const {
 #define CHECK_FLOAT_COUNTER_VALUE(entry, var_name, relationship, value, eps_factor) \
     _CHECK_FLOAT_RESULT_VALUE(entry, GetCounterAs, double, var_name, relationship, value, eps_factor)

+// clang-format on
+
 // ========================================================================= //
 // --------------------------- Misc Utilities ------------------------------ //
 // ========================================================================= //

@@ -1,13 +1,13 @@
+#include <cstring>
 #include <iostream>
 #include <map>
 #include <memory>
 #include <sstream>
-#include <cstring>

+#include "../src/benchmark_api_internal.h"
 #include "../src/check.h"  // NOTE: check.h is for internal use only!
 #include "../src/re.h"     // NOTE: re.h is for internal use only
 #include "output_test.h"
-#include "../src/benchmark_api_internal.h"

 // ========================================================================= //
 // ------------------------------ Internals -------------------------------- //

@@ -33,6 +33,7 @@ TestCaseList& GetTestCaseList(TestCaseID ID) {

 SubMap& GetSubstitutions() {
   // Don't use 'dec_re' from header because it may not yet be initialized.
+  // clang-format off
   static std::string safe_dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
   static SubMap map = {
       {"%float", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?"},

@@ -57,6 +58,7 @@ SubMap& GetSubstitutions() {
        "," + safe_dec_re + ",,,"},
       {"%csv_label_report_begin", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,,,"},
       {"%csv_label_report_end", ",,"}};
+  // clang-format on
   return map;
 }

@@ -147,9 +149,9 @@ class TestReporter : public benchmark::BenchmarkReporter {
   }

  private:
-  std::vector<benchmark::BenchmarkReporter *> reporters_;
+  std::vector<benchmark::BenchmarkReporter*> reporters_;
 };
-}
+}  // namespace

 }  // end namespace internal

@@ -163,28 +165,25 @@ namespace internal {
 // It works by parsing the CSV output to read the results.
 class ResultsChecker {
  public:
-
-  struct PatternAndFn : public TestCase { // reusing TestCase for its regexes
+  struct PatternAndFn : public TestCase {  // reusing TestCase for its regexes
     PatternAndFn(const std::string& rx, ResultsCheckFn fn_)
-      : TestCase(rx), fn(fn_) {}
+        : TestCase(rx), fn(fn_) {}
     ResultsCheckFn fn;
   };

-  std::vector< PatternAndFn > check_patterns;
-  std::vector< Results > results;
-  std::vector< std::string > field_names;
+  std::vector<PatternAndFn> check_patterns;
+  std::vector<Results> results;
+  std::vector<std::string> field_names;

   void Add(const std::string& entry_pattern, ResultsCheckFn fn);

   void CheckResults(std::stringstream& output);

  private:
-
   void SetHeader_(const std::string& csv_header);
   void SetValues_(const std::string& entry_csv_line);

-  std::vector< std::string > SplitCsv_(const std::string& line);
-
+  std::vector<std::string> SplitCsv_(const std::string& line);
 };

 // store the static ResultsChecker in a function to prevent initialization

@@ -207,7 +206,7 @@ void ResultsChecker::CheckResults(std::stringstream& output) {
     // clear before calling tellg()
     output.clear();
     // seek to zero only when needed
-    if(output.tellg() > start) output.seekg(start);
+    if (output.tellg() > start) output.seekg(start);
     // and just in case
     output.clear();
   }

@@ -218,18 +217,18 @@ void ResultsChecker::CheckResults(std::stringstream& output) {
     CHECK(output.good());
     std::getline(output, line);
     if (on_first) {
-      SetHeader_(line); // this is important
+      SetHeader_(line);  // this is important
       on_first = false;
       continue;
     }
     SetValues_(line);
   }
   // finally we can call the subscribed check functions
-  for(const auto& p : check_patterns) {
+  for (const auto& p : check_patterns) {
     VLOG(2) << "--------------------------------\n";
     VLOG(2) << "checking for benchmarks matching " << p.regex_str << "...\n";
-    for(const auto& r : results) {
-      if(!p.regex->Match(r.name)) {
+    for (const auto& r : results) {
+      if (!p.regex->Match(r.name)) {
         VLOG(2) << p.regex_str << " is not matched by " << r.name << "\n";
         continue;
       } else {
@@ -249,51 +248,50 @@ void ResultsChecker::SetHeader_(const std::string& csv_header) {

 // set the values for a benchmark
 void ResultsChecker::SetValues_(const std::string& entry_csv_line) {
-  if(entry_csv_line.empty()) return; // some lines are empty
+  if (entry_csv_line.empty()) return;  // some lines are empty
   CHECK(!field_names.empty());
   auto vals = SplitCsv_(entry_csv_line);
   CHECK_EQ(vals.size(), field_names.size());
-  results.emplace_back(vals[0]); // vals[0] is the benchmark name
-  auto &entry = results.back();
+  results.emplace_back(vals[0]);  // vals[0] is the benchmark name
+  auto& entry = results.back();
   for (size_t i = 1, e = vals.size(); i < e; ++i) {
     entry.values[field_names[i]] = vals[i];
   }
 }

 // a quick'n'dirty csv splitter (eliminating quotes)
-std::vector< std::string > ResultsChecker::SplitCsv_(const std::string& line) {
-  std::vector< std::string > out;
-  if(line.empty()) return out;
-  if(!field_names.empty()) out.reserve(field_names.size());
+std::vector<std::string> ResultsChecker::SplitCsv_(const std::string& line) {
+  std::vector<std::string> out;
+  if (line.empty()) return out;
+  if (!field_names.empty()) out.reserve(field_names.size());
   size_t prev = 0, pos = line.find_first_of(','), curr = pos;
-  while(pos != line.npos) {
+  while (pos != line.npos) {
     CHECK(curr > 0);
-    if(line[prev] == '"') ++prev;
-    if(line[curr-1] == '"') --curr;
-    out.push_back(line.substr(prev, curr-prev));
+    if (line[prev] == '"') ++prev;
+    if (line[curr - 1] == '"') --curr;
+    out.push_back(line.substr(prev, curr - prev));
     prev = pos + 1;
     pos = line.find_first_of(',', pos + 1);
     curr = pos;
   }
   curr = line.size();
-  if(line[prev] == '"') ++prev;
-  if(line[curr-1] == '"') --curr;
-  out.push_back(line.substr(prev, curr-prev));
+  if (line[prev] == '"') ++prev;
+  if (line[curr - 1] == '"') --curr;
+  out.push_back(line.substr(prev, curr - prev));
   return out;
 }

 }  // end namespace internal

-size_t AddChecker(const char* bm_name, ResultsCheckFn fn)
-{
-  auto &rc = internal::GetResultsChecker();
+size_t AddChecker(const char* bm_name, ResultsCheckFn fn) {
+  auto& rc = internal::GetResultsChecker();
   rc.Add(bm_name, fn);
   return rc.results.size();
 }

 int Results::NumThreads() const {
   auto pos = name.find("/threads:");
-  if(pos == name.npos) return 1;
+  if (pos == name.npos) return 1;
   auto end = name.find('/', pos + 9);
   std::stringstream ss;
   ss << name.substr(pos + 9, end);

@@ -305,17 +303,17 @@ int Results::NumThreads() const {

 double Results::GetTime(BenchmarkTime which) const {
   CHECK(which == kCpuTime || which == kRealTime);
-  const char *which_str = which == kCpuTime ? "cpu_time" : "real_time";
-  double val = GetAs< double >(which_str);
+  const char* which_str = which == kCpuTime ? "cpu_time" : "real_time";
+  double val = GetAs<double>(which_str);
   auto unit = Get("time_unit");
   CHECK(unit);
-  if(*unit == "ns") {
+  if (*unit == "ns") {
     return val * 1.e-9;
-  } else if(*unit == "us") {
+  } else if (*unit == "us") {
     return val * 1.e-6;
-  } else if(*unit == "ms") {
+  } else if (*unit == "ms") {
     return val * 1.e-3;
-  } else if(*unit == "s") {
+  } else if (*unit == "s") {
     return val;
   } else {
     CHECK(1 == 0) << "unknown time unit: " << *unit;

@@ -333,7 +331,7 @@ TestCase::TestCase(std::string re, int rule)
       substituted_regex(internal::PerformSubstitutions(regex_str)),
       regex(std::make_shared<benchmark::Regex>()) {
   std::string err_str;
-  regex->Init(substituted_regex,& err_str);
+  regex->Init(substituted_regex, &err_str);
   CHECK(err_str.empty()) << "Could not construct regex \"" << substituted_regex
                          << "\""
                          << "\n originally \"" << regex_str << "\""

@@ -367,7 +365,7 @@ int SetSubstitutions(
 void RunOutputTests(int argc, char* argv[]) {
   using internal::GetTestCaseList;
   benchmark::Initialize(&argc, argv);
-  auto options = benchmark::internal::GetOutputOptions(/*force_no_color*/true);
+  auto options = benchmark::internal::GetOutputOptions(/*force_no_color*/ true);
   benchmark::ConsoleReporter CR(options);
   benchmark::JSONReporter JR;
   benchmark::CSVReporter CSVR;

@@ -416,7 +414,7 @@ void RunOutputTests(int argc, char* argv[]) {

   // now that we know the output is as expected, we can dispatch
   // the checks to subscribees.
-  auto &csv = TestCases[2];
+  auto& csv = TestCases[2];
   // would use == but gcc spits a warning
   CHECK(std::strcmp(csv.name, "CSVReporter") == 0);
   internal::GetResultsChecker().CheckResults(csv.out_stream);

@@ -29,6 +29,7 @@ struct TestCase {
   typedef benchmark::BenchmarkReporter::Run Run;

   void CheckRun(Run const& run) const {
+    // clang-format off
     CHECK(name == run.benchmark_name) << "expected " << name << " got "
                                       << run.benchmark_name;
     if (label) {

@@ -37,6 +38,7 @@ struct TestCase {
     } else {
       CHECK(run.report_label == "");
     }
+    // clang-format on
   }
 };

@@ -9,10 +9,9 @@
 // ---------------------- Testing Prologue Output -------------------------- //
 // ========================================================================= //

-ADD_CASES(TC_ConsoleOut,
-          {{"^[-]+$", MR_Next},
-           {"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
-           {"^[-]+$", MR_Next}});
+ADD_CASES(TC_ConsoleOut, {{"^[-]+$", MR_Next},
+                          {"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
+                          {"^[-]+$", MR_Next}});
 static int AddContextCases() {
   AddCases(TC_ConsoleErr,
            {

@@ -20,14 +19,15 @@ static int AddContextCases() {
        {"Running .*/reporter_output_test(\\.exe)?$", MR_Next},
        {"Run on \\(%int X %float MHz CPU s\\)", MR_Next},
       });
-  AddCases(TC_JSONOut, {{"^\\{", MR_Default},
-                        {"\"context\":", MR_Next},
-                        {"\"date\": \"", MR_Next},
-                        {"\"executable\": \".*/reporter_output_test(\\.exe)?\",", MR_Next},
-                        {"\"num_cpus\": %int,$", MR_Next},
-                        {"\"mhz_per_cpu\": %float,$", MR_Next},
-                        {"\"cpu_scaling_enabled\": ", MR_Next},
-                        {"\"caches\": \\[$", MR_Next}});
+  AddCases(TC_JSONOut,
+           {{"^\\{", MR_Default},
+            {"\"context\":", MR_Next},
+            {"\"date\": \"", MR_Next},
+            {"\"executable\": \".*/reporter_output_test(\\.exe)?\",", MR_Next},
+            {"\"num_cpus\": %int,$", MR_Next},
+            {"\"mhz_per_cpu\": %float,$", MR_Next},
+            {"\"cpu_scaling_enabled\": ", MR_Next},
+            {"\"caches\": \\[$", MR_Next}});
   auto const& Caches = benchmark::CPUInfo::Get().caches;
   if (!Caches.empty()) {
     AddCases(TC_ConsoleErr, {{"CPU Caches:$", MR_Next}});

@@ -348,9 +348,12 @@ void BM_UserStats(benchmark::State& state) {
   for (auto _ : state) {
   }
 }
+// clang-format off
 BENCHMARK(BM_UserStats)
-    ->Repetitions(3)
-    ->ComputeStatistics("", UserStatistics);
+  ->Repetitions(3)
+  ->ComputeStatistics("", UserStatistics);
+// clang-format on

 // check that user-provided stats is calculated, and is after the default-ones
 // empty string as name is intentional, it would sort before anything else
 ADD_CASES(TC_ConsoleOut, {{"^BM_UserStats/repeats:3 %console_report$"},

@@ -33,8 +33,8 @@ struct TestCase {
   typedef benchmark::BenchmarkReporter::Run Run;

   void CheckRun(Run const& run) const {
-    CHECK(name == run.benchmark_name) << "expected " << name << " got "
-                                      << run.benchmark_name;
+    CHECK(name == run.benchmark_name)
+        << "expected " << name << " got " << run.benchmark_name;
     CHECK(error_occurred == run.error_occurred);
     CHECK(error_message == run.error_message);
     if (error_occurred) {

@@ -70,7 +70,6 @@ void BM_error_before_running(benchmark::State& state) {
 BENCHMARK(BM_error_before_running);
 ADD_CASES("BM_error_before_running", {{"", true, "error message"}});

-
 void BM_error_before_running_batch(benchmark::State& state) {
   state.SkipWithError("error message");
   while (state.KeepRunningBatch(17)) {

@@ -124,7 +123,7 @@ void BM_error_during_running_ranged_for(benchmark::State& state) {
       // Test the unfortunate but documented behavior that the ranged-for loop
       // doesn't automatically terminate when SkipWithError is set.
       assert(++It != End);
-      break; // Required behavior
+      break;  // Required behavior
     }
   }
 }

@@ -133,8 +132,6 @@ ADD_CASES("BM_error_during_running_ranged_for",
           {{"/1/iterations:5", true, "error message"},
            {"/2/iterations:5", false, ""}});

-
-
 void BM_error_after_running(benchmark::State& state) {
   for (auto _ : state) {
     benchmark::DoNotOptimize(state.iterations());

@@ -4,11 +4,13 @@
 #pragma clang diagnostic ignored "-Wreturn-type"
 #endif

+// clang-format off
 extern "C" {
 extern int ExternInt;
 benchmark::State& GetState();
 void Fn();
 }
+// clang-format on

 using benchmark::State;

|
|||
|
||||
namespace {
|
||||
TEST(StatisticsTest, Mean) {
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({42,42,42,42}), 42.0);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({1,2,3,4}), 2.5);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({42, 42, 42, 42}), 42.0);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({1, 2, 3, 4}), 2.5);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({1, 2, 5, 10, 10, 14}), 7.0);
|
||||
}
|
||||
|
||||
TEST(StatisticsTest, Median) {
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({42,42,42,42}), 42.0);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({1,2,3,4}), 2.5);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({42, 42, 42, 42}), 42.0);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({1, 2, 3, 4}), 2.5);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({1, 2, 5, 10, 10}), 5.0);
|
||||
}
|
||||
|
||||
TEST(StatisticsTest, StdDev) {
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsStdDev({101, 101, 101, 101}), 0.0);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsStdDev({1,2,3}), 1.0);
|
||||
EXPECT_DOUBLE_EQ(benchmark::StatisticsStdDev({1, 2, 3}), 1.0);
|
||||
EXPECT_FLOAT_EQ(benchmark::StatisticsStdDev({1.5, 2.4, 3.3, 4.2, 5.1}),
|
||||
1.42302495);
|
||||
1.42302495);
|
||||
}
|
||||
|
||||
} // end namespace
|
||||
|
|
|
@@ -4,15 +4,15 @@
 #include <cassert>
 #include <memory>

-template<typename T>
+template <typename T>
 class MyFixture : public ::benchmark::Fixture {
-public:
+ public:
   MyFixture() : data(0) {}

   T data;
 };

-BENCHMARK_TEMPLATE_F(MyFixture, Foo, int)(benchmark::State &st) {
+BENCHMARK_TEMPLATE_F(MyFixture, Foo, int)(benchmark::State& st) {
   for (auto _ : st) {
     data += 1;
   }

|
|||
// @todo: <jpmag> this checks the full output at once; the rule for
|
||||
// CounterSet1 was failing because it was not matching "^[-]+$".
|
||||
// @todo: <jpmag> check that the counters are vertically aligned.
|
||||
ADD_CASES(TC_ConsoleOut, {
|
||||
// keeping these lines long improves readability, so:
|
||||
// clang-format off
|
||||
ADD_CASES(
|
||||
TC_ConsoleOut,
|
||||
{
|
||||
// keeping these lines long improves readability, so:
|
||||
// clang-format off
|
||||
{"^[-]+$", MR_Next},
|
||||
{"^Benchmark %s Time %s CPU %s Iterations %s Bar %s Bat %s Baz %s Foo %s Frob %s Lob$", MR_Next},
|
||||
{"^[-]+$", MR_Next},
|
||||
|
@ -44,8 +46,8 @@ ADD_CASES(TC_ConsoleOut, {
|
|||
{"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
|
||||
{"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
|
||||
{"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$"},
|
||||
// clang-format on
|
||||
});
|
||||
// clang-format on
|
||||
});
|
||||
ADD_CASES(TC_CSVOut, {{"%csv_header,"
|
||||
"\"Bar\",\"Bat\",\"Baz\",\"Foo\",\"Frob\",\"Lob\""}});
|
||||
|
||||
|
@ -58,12 +60,12 @@ void BM_Counters_Tabular(benchmark::State& state) {
|
|||
}
|
||||
namespace bm = benchmark;
|
||||
state.counters.insert({
|
||||
{"Foo", { 1, bm::Counter::kAvgThreads}},
|
||||
{"Bar", { 2, bm::Counter::kAvgThreads}},
|
||||
{"Baz", { 4, bm::Counter::kAvgThreads}},
|
||||
{"Bat", { 8, bm::Counter::kAvgThreads}},
|
||||
{"Frob", {16, bm::Counter::kAvgThreads}},
|
||||
{"Lob", {32, bm::Counter::kAvgThreads}},
|
||||
{"Foo", {1, bm::Counter::kAvgThreads}},
|
||||
{"Bar", {2, bm::Counter::kAvgThreads}},
|
||||
{"Baz", {4, bm::Counter::kAvgThreads}},
|
||||
{"Bat", {8, bm::Counter::kAvgThreads}},
|
||||
{"Frob", {16, bm::Counter::kAvgThreads}},
|
||||
{"Lob", {32, bm::Counter::kAvgThreads}},
|
||||
});
|
||||
}
|
||||
BENCHMARK(BM_Counters_Tabular)->ThreadRange(1, 16);
|
||||
|
@ -102,12 +104,12 @@ void BM_CounterRates_Tabular(benchmark::State& state) {
|
|||
}
|
||||
namespace bm = benchmark;
|
||||
state.counters.insert({
|
||||
{"Foo", { 1, bm::Counter::kAvgThreadsRate}},
|
||||
{"Bar", { 2, bm::Counter::kAvgThreadsRate}},
|
||||
{"Baz", { 4, bm::Counter::kAvgThreadsRate}},
|
||||
{"Bat", { 8, bm::Counter::kAvgThreadsRate}},
|
||||
{"Frob", {16, bm::Counter::kAvgThreadsRate}},
|
||||
{"Lob", {32, bm::Counter::kAvgThreadsRate}},
|
||||
{"Foo", {1, bm::Counter::kAvgThreadsRate}},
|
||||
{"Bar", {2, bm::Counter::kAvgThreadsRate}},
|
||||
{"Baz", {4, bm::Counter::kAvgThreadsRate}},
|
||||
{"Bat", {8, bm::Counter::kAvgThreadsRate}},
|
||||
{"Frob", {16, bm::Counter::kAvgThreadsRate}},
|
||||
{"Lob", {32, bm::Counter::kAvgThreadsRate}},
|
||||
});
|
||||
}
|
||||
BENCHMARK(BM_CounterRates_Tabular)->ThreadRange(1, 16);
|
||||
|
@ -129,12 +131,12 @@ ADD_CASES(TC_CSVOut, {{"^\"BM_CounterRates_Tabular/threads:%int\",%csv_report,"
|
|||
// to CHECK_BENCHMARK_RESULTS()
|
||||
void CheckTabularRate(Results const& e) {
|
||||
double t = e.DurationCPUTime();
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Foo", EQ, 1./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Bar", EQ, 2./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Baz", EQ, 4./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Bat", EQ, 8./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Frob", EQ, 16./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Lob", EQ, 32./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Foo", EQ, 1. / t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Bar", EQ, 2. / t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Baz", EQ, 4. / t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Bat", EQ, 8. / t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Frob", EQ, 16. / t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "Lob", EQ, 32. / t, 0.001);
|
||||
}
|
||||
CHECK_BENCHMARK_RESULTS("BM_CounterRates_Tabular/threads:%int",
|
||||
&CheckTabularRate);
|
||||
|
@ -149,9 +151,9 @@ void BM_CounterSet0_Tabular(benchmark::State& state) {
|
|||
}
|
||||
namespace bm = benchmark;
|
||||
state.counters.insert({
|
||||
{"Foo", {10, bm::Counter::kAvgThreads}},
|
||||
{"Bar", {20, bm::Counter::kAvgThreads}},
|
||||
{"Baz", {40, bm::Counter::kAvgThreads}},
|
||||
{"Foo", {10, bm::Counter::kAvgThreads}},
|
||||
{"Bar", {20, bm::Counter::kAvgThreads}},
|
||||
{"Baz", {40, bm::Counter::kAvgThreads}},
|
||||
});
|
||||
}
|
||||
BENCHMARK(BM_CounterSet0_Tabular)->ThreadRange(1, 16);
|
||||
|
@ -181,9 +183,9 @@ void BM_CounterSet1_Tabular(benchmark::State& state) {
|
|||
}
|
||||
namespace bm = benchmark;
|
||||
state.counters.insert({
|
||||
{"Foo", {15, bm::Counter::kAvgThreads}},
|
||||
{"Bar", {25, bm::Counter::kAvgThreads}},
|
||||
{"Baz", {45, bm::Counter::kAvgThreads}},
|
||||
{"Foo", {15, bm::Counter::kAvgThreads}},
|
||||
{"Bar", {25, bm::Counter::kAvgThreads}},
|
||||
{"Baz", {45, bm::Counter::kAvgThreads}},
|
||||
});
|
||||
}
|
||||
BENCHMARK(BM_CounterSet1_Tabular)->ThreadRange(1, 16);
|
||||
|
@ -217,9 +219,9 @@ void BM_CounterSet2_Tabular(benchmark::State& state) {
|
|||
}
|
||||
namespace bm = benchmark;
|
||||
state.counters.insert({
|
||||
{"Foo", {10, bm::Counter::kAvgThreads}},
|
||||
{"Bat", {30, bm::Counter::kAvgThreads}},
|
||||
{"Baz", {40, bm::Counter::kAvgThreads}},
|
||||
{"Foo", {10, bm::Counter::kAvgThreads}},
|
||||
{"Bat", {30, bm::Counter::kAvgThreads}},
|
||||
{"Baz", {40, bm::Counter::kAvgThreads}},
|
||||
});
|
||||
}
|
||||
BENCHMARK(BM_CounterSet2_Tabular)->ThreadRange(1, 16);
|
||||
|
|
|
@ -8,12 +8,16 @@
|
|||
// ---------------------- Testing Prologue Output -------------------------- //
|
||||
// ========================================================================= //
|
||||
|
||||
// clang-format off
|
||||
|
||||
ADD_CASES(TC_ConsoleOut,
|
||||
{{"^[-]+$", MR_Next},
|
||||
{"^Benchmark %s Time %s CPU %s Iterations UserCounters...$", MR_Next},
|
||||
{"^[-]+$", MR_Next}});
|
||||
ADD_CASES(TC_CSVOut, {{"%csv_header,\"bar\",\"foo\""}});
|
||||
|
||||
// clang-format on
|
||||
|
||||
// ========================================================================= //
|
||||
// ------------------------- Simple Counters Output ------------------------ //
|
||||
// ========================================================================= //
|
||||
|
@ -25,7 +29,8 @@ void BM_Counters_Simple(benchmark::State& state) {
|
|||
state.counters["bar"] = 2 * (double)state.iterations();
|
||||
}
|
||||
BENCHMARK(BM_Counters_Simple);
|
||||
ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_Simple %console_report bar=%hrfloat foo=%hrfloat$"}});
|
||||
ADD_CASES(TC_ConsoleOut,
|
||||
{{"^BM_Counters_Simple %console_report bar=%hrfloat foo=%hrfloat$"}});
|
||||
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Simple\",$"},
|
||||
{"\"iterations\": %int,$", MR_Next},
|
||||
{"\"real_time\": %float,$", MR_Next},
|
||||
|
@ -38,10 +43,10 @@ ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Simple\",%csv_report,%float,%float$"}});
|
|||
// VS2013 does not allow this function to be passed as a lambda argument
|
||||
// to CHECK_BENCHMARK_RESULTS()
|
||||
void CheckSimple(Results const& e) {
|
||||
double its = e.GetAs< double >("iterations");
|
||||
double its = e.GetAs<double>("iterations");
|
||||
CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
|
||||
// check that the value of bar is within 0.1% of the expected value
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2.*its, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. * its, 0.001);
|
||||
}
|
||||
CHECK_BENCHMARK_RESULTS("BM_Counters_Simple", &CheckSimple);
|
||||
|
||||
|
@ -49,7 +54,9 @@ CHECK_BENCHMARK_RESULTS("BM_Counters_Simple", &CheckSimple);
|
|||
// --------------------- Counters+Items+Bytes/s Output --------------------- //
|
||||
// ========================================================================= //
|
||||
|
||||
namespace { int num_calls1 = 0; }
|
||||
namespace {
|
||||
int num_calls1 = 0;
|
||||
}
|
||||
void BM_Counters_WithBytesAndItemsPSec(benchmark::State& state) {
|
||||
for (auto _ : state) {
|
||||
}
|
||||
|
@ -77,12 +84,12 @@ ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_WithBytesAndItemsPSec\","
|
|||
// VS2013 does not allow this function to be passed as a lambda argument
|
||||
// to CHECK_BENCHMARK_RESULTS()
|
||||
void CheckBytesAndItemsPSec(Results const& e) {
|
||||
double t = e.DurationCPUTime(); // this (and not real time) is the time used
|
||||
double t = e.DurationCPUTime(); // this (and not real time) is the time used
|
||||
CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
|
||||
CHECK_COUNTER_VALUE(e, int, "bar", EQ, num_calls1);
|
||||
// check that the values are within 0.1% of the expected values
|
||||
CHECK_FLOAT_RESULT_VALUE(e, "bytes_per_second", EQ, 364./t, 0.001);
|
||||
CHECK_FLOAT_RESULT_VALUE(e, "items_per_second", EQ, 150./t, 0.001);
|
||||
CHECK_FLOAT_RESULT_VALUE(e, "bytes_per_second", EQ, 364. / t, 0.001);
|
||||
CHECK_FLOAT_RESULT_VALUE(e, "items_per_second", EQ, 150. / t, 0.001);
|
||||
}
|
||||
CHECK_BENCHMARK_RESULTS("BM_Counters_WithBytesAndItemsPSec",
|
||||
&CheckBytesAndItemsPSec);
|
||||
|
@ -99,7 +106,9 @@ void BM_Counters_Rate(benchmark::State& state) {
|
|||
state.counters["bar"] = bm::Counter{2, bm::Counter::kIsRate};
|
||||
}
|
||||
BENCHMARK(BM_Counters_Rate);
|
||||
ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_Rate %console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
|
||||
ADD_CASES(
|
||||
TC_ConsoleOut,
|
||||
{{"^BM_Counters_Rate %console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
|
||||
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Rate\",$"},
|
||||
{"\"iterations\": %int,$", MR_Next},
|
||||
{"\"real_time\": %float,$", MR_Next},
|
||||
|
@ -112,10 +121,10 @@ ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Rate\",%csv_report,%float,%float$"}});
|
|||
// VS2013 does not allow this function to be passed as a lambda argument
|
||||
// to CHECK_BENCHMARK_RESULTS()
|
||||
void CheckRate(Results const& e) {
|
||||
double t = e.DurationCPUTime(); // this (and not real time) is the time used
|
||||
double t = e.DurationCPUTime(); // this (and not real time) is the time used
|
||||
// check that the values are within 0.1% of the expected values
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2./t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / t, 0.001);
|
||||
CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / t, 0.001);
|
||||
}
|
||||
CHECK_BENCHMARK_RESULTS("BM_Counters_Rate", &CheckRate);
|
||||
|
||||
|
@ -130,7 +139,8 @@ void BM_Counters_Threads(benchmark::State& state) {
|
|||
state.counters["bar"] = 2;
|
||||
}
|
||||
BENCHMARK(BM_Counters_Threads)->ThreadRange(1, 8);
|
||||
ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_Threads/threads:%int %console_report bar=%hrfloat foo=%hrfloat$"}});
|
||||
ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_Threads/threads:%int %console_report "
|
||||
"bar=%hrfloat foo=%hrfloat$"}});
|
||||
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Threads/threads:%int\",$"},
|
||||
{"\"iterations\": %int,$", MR_Next},
|
||||
{"\"real_time\": %float,$", MR_Next},
|
||||
|
@ -139,7 +149,9 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Threads/threads:%int\",$"},
|
|||
{"\"bar\": %float,$", MR_Next},
|
||||
{"\"foo\": %float$", MR_Next},
|
||||
{"}", MR_Next}});
|
||||
ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Threads/threads:%int\",%csv_report,%float,%float$"}});
|
||||
ADD_CASES(
|
||||
TC_CSVOut,
|
||||
{{"^\"BM_Counters_Threads/threads:%int\",%csv_report,%float,%float$"}});
|
||||
// VS2013 does not allow this function to be passed as a lambda argument
|
||||
// to CHECK_BENCHMARK_RESULTS()
|
||||
void CheckThreads(Results const& e) {
|
||||
|
@ -160,7 +172,8 @@ void BM_Counters_AvgThreads(benchmark::State& state) {
|
|||
state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgThreads};
|
||||
}
|
||||
BENCHMARK(BM_Counters_AvgThreads)->ThreadRange(1, 8);
|
||||
ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreads/threads:%int %console_report bar=%hrfloat foo=%hrfloat$"}});
|
||||
ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreads/threads:%int "
|
||||
"%console_report bar=%hrfloat foo=%hrfloat$"}});
|
||||
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_AvgThreads/threads:%int\",$"},
|
||||
{"\"iterations\": %int,$", MR_Next},
|
||||
{"\"real_time\": %float,$", MR_Next},
|
||||
|
@ -169,7 +182,9 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_AvgThreads/threads:%int\",$"},
|
|||
{"\"bar\": %float,$", MR_Next},
|
||||
{"\"foo\": %float$", MR_Next},
|
||||
{"}", MR_Next}});
|
||||
ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_AvgThreads/threads:%int\",%csv_report,%float,%float$"}});
|
||||
ADD_CASES(
|
||||
TC_CSVOut,
|
||||
{{"^\"BM_Counters_AvgThreads/threads:%int\",%csv_report,%float,%float$"}});
|
||||
// VS2013 does not allow this function to be passed as a lambda argument
|
||||
// to CHECK_BENCHMARK_RESULTS()
|
||||
void CheckAvgThreads(Results const& e) {
|
||||
|
@@ -191,21 +206,24 @@ void BM_Counters_AvgThreadsRate(benchmark::State& state) {
   state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgThreadsRate};
 }
 BENCHMARK(BM_Counters_AvgThreadsRate)->ThreadRange(1, 8);
-ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreadsRate/threads:%int %console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
-ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_AvgThreadsRate/threads:%int\",$"},
-                       {"\"iterations\": %int,$", MR_Next},
-                       {"\"real_time\": %float,$", MR_Next},
-                       {"\"cpu_time\": %float,$", MR_Next},
-                       {"\"time_unit\": \"ns\",$", MR_Next},
-                       {"\"bar\": %float,$", MR_Next},
-                       {"\"foo\": %float$", MR_Next},
-                       {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_AvgThreadsRate/threads:%int\",%csv_report,%float,%float$"}});
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreadsRate/threads:%int "
+                           "%console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_AvgThreadsRate/threads:%int\",$"},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_AvgThreadsRate/"
+                       "threads:%int\",%csv_report,%float,%float$"}});
 // VS2013 does not allow this function to be passed as a lambda argument
 // to CHECK_BENCHMARK_RESULTS()
 void CheckAvgThreadsRate(Results const& e) {
-  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1./e.DurationCPUTime(), 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2./e.DurationCPUTime(), 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / e.DurationCPUTime(), 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / e.DurationCPUTime(), 0.001);
 }
 CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreadsRate/threads:%int",
                         &CheckAvgThreadsRate);