mirror of https://github.com/google/benchmark.git
google formatted
parent 109f528a40
commit 22cb9d9ce0
include/benchmark/reporter.h

@@ -20,7 +20,7 @@
 #include <utility>
 #include <vector>

-#include "benchmark_api.h" // For forward declaration of BenchmarkReporter
+#include "benchmark_api.h"  // For forward declaration of BenchmarkReporter

 namespace benchmark {

@@ -133,14 +133,14 @@ class BenchmarkReporter {
     error_stream_ = err;
   }

-  std::ostream& GetOutputStream() const {
+  std::ostream& GetOutputStream() const {
     return *output_stream_;
   }

   std::ostream& GetErrorStream() const {
     return *error_stream_;
   }

   virtual ~BenchmarkReporter();

   // Write a human readable string to 'out' representing the specified
@@ -148,7 +148,7 @@ class BenchmarkReporter {
   // REQUIRES: 'out' is non-null.
   static void PrintBasicContext(std::ostream* out, Context const& context);

-private:
+ private:
   std::ostream* output_stream_;
   std::ostream* error_stream_;
 };
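The class above is the whole extension surface: a reporter holds an output and an error stream and overrides the virtuals that the subclasses below declare. A minimal sketch against this interface (MinimalReporter is a hypothetical name; benchmark_name and iterations are Run fields used elsewhere in this commit):

#include "benchmark/reporter.h"

class MinimalReporter : public benchmark::BenchmarkReporter {
 public:
  virtual bool ReportContext(const Context& context) {
    PrintBasicContext(&GetErrorStream(), context);  // static helper declared above
    return true;
  }
  virtual void ReportRuns(const std::vector<Run>& reports) {
    for (const Run& run : reports) {
      GetOutputStream() << run.benchmark_name << ": " << run.iterations
                        << " iterations\n";
    }
  }
};

Passing such an instance to RunSpecifiedBenchmarks(&reporter) replaces the default ConsoleReporter described just below.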
@@ -156,61 +156,61 @@ private:
 // Simple reporter that outputs benchmark data to the console. This is the
 // default reporter used by RunSpecifiedBenchmarks().
 class ConsoleReporter : public BenchmarkReporter {
-public:
+ public:
   virtual bool ReportContext(const Context& context);
   virtual void ReportRuns(const std::vector<Run>& reports);

-protected:
+ protected:
   virtual void PrintRunData(const Run& report);

   size_t name_field_width_;
 };

 class JSONReporter : public BenchmarkReporter {
-public:
+ public:
   JSONReporter() : first_report_(true) {}
   virtual bool ReportContext(const Context& context);
   virtual void ReportRuns(const std::vector<Run>& reports);
   virtual void Finalize();

-private:
+ private:
   void PrintRunData(const Run& report);

   bool first_report_;
 };

 class CSVReporter : public BenchmarkReporter {
-public:
+ public:
   virtual bool ReportContext(const Context& context);
   virtual void ReportRuns(const std::vector<Run>& reports);

-private:
+ private:
   void PrintRunData(const Run& report);
 };

 inline const char* GetTimeUnitString(TimeUnit unit) {
   switch (unit) {
-  case kMillisecond:
-    return "ms";
-  case kMicrosecond:
-    return "us";
-  case kNanosecond:
-  default:
-    return "ns";
+    case kMillisecond:
+      return "ms";
+    case kMicrosecond:
+      return "us";
+    case kNanosecond:
+    default:
+      return "ns";
   }
 }

 inline double GetTimeUnitMultiplier(TimeUnit unit) {
   switch (unit) {
-  case kMillisecond:
-    return 1e3;
-  case kMicrosecond:
-    return 1e6;
-  case kNanosecond:
-  default:
-    return 1e9;
+    case kMillisecond:
+      return 1e3;
+    case kMicrosecond:
+      return 1e6;
+    case kNanosecond:
+    default:
+      return 1e9;
   }
 }

-} // end namespace benchmark
-#endif // BENCHMARK_REPORTER_H_
+}  // end namespace benchmark
+#endif  // BENCHMARK_REPORTER_H_
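GetTimeUnitString and GetTimeUnitMultiplier are meant to be used together: a reporter scales a duration measured in seconds by the multiplier and prints the matching label. A small sketch of that pairing (PrintInUnit is a hypothetical helper; the 0.000250 s value is made up):

#include <cstdio>

#include "benchmark/reporter.h"

// 0.000250 s with kMicrosecond prints as "       250 us".
void PrintInUnit(double seconds, benchmark::TimeUnit unit) {
  double scaled = seconds * benchmark::GetTimeUnitMultiplier(unit);
  std::printf("%10.0f %s\n", scaled, benchmark::GetTimeUnitString(unit));
}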
src/complexity.cc

@@ -17,55 +17,55 @@

 #include "benchmark/benchmark_api.h"

-#include "complexity.h"
-#include "check.h"
-#include "stat.h"
-#include <cmath>
+#include <algorithm>
+#include <cmath>
+#include "check.h"
+#include "complexity.h"
+#include "stat.h"

 namespace benchmark {

 // Internal function to calculate the different scalability forms
 BigOFunc* FittingCurve(BigO complexity) {
   switch (complexity) {
-  case oN:
-    return [](size_t n) -> double {return n; };
-  case oNSquared:
-    return [](size_t n) -> double {return n * n; };
-  case oNCubed:
-    return [](size_t n) -> double {return n * n * n; };
-  case oLogN:
-    return [](size_t n) {return log2(n); };
-  case oNLogN:
-    return [](size_t n) {return n * log2(n); };
-  case o1:
-  default:
-    return [](size_t) {return 1.0; };
+    case oN:
+      return [](size_t n) -> double { return n; };
+    case oNSquared:
+      return [](size_t n) -> double { return n * n; };
+    case oNCubed:
+      return [](size_t n) -> double { return n * n * n; };
+    case oLogN:
+      return [](size_t n) { return log2(n); };
+    case oNLogN:
+      return [](size_t n) { return n * log2(n); };
+    case o1:
+    default:
+      return [](size_t) { return 1.0; };
   }
 }

 // Function to return an string for the calculated complexity
 std::string GetBigOString(BigO complexity) {
   switch (complexity) {
-  case oN:
-    return "N";
-  case oNSquared:
-    return "N^2";
-  case oNCubed:
-    return "N^3";
-  case oLogN:
-    return "lgN";
-  case oNLogN:
-    return "NlgN";
-  case o1:
-    return "(1)";
-  default:
-    return "f(N)";
+    case oN:
+      return "N";
+    case oNSquared:
+      return "N^2";
+    case oNCubed:
+      return "N^3";
+    case oLogN:
+      return "lgN";
+    case oNLogN:
+      return "NlgN";
+    case o1:
+      return "(1)";
+    default:
+      return "f(N)";
   }
 }

-// Find the coefficient for the high-order term in the running time, by
-// minimizing the sum of squares of relative error, for the fitting curve
+// Find the coefficient for the high-order term in the running time, by
+// minimizing the sum of squares of relative error, for the fitting curve
 // given by the lambda expresion.
 // - n : Vector containing the size of the benchmark tests.
 // - time : Vector containing the times for the benchmark tests.
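These lambdas are the candidate curves that the least-squares fit below runs against. On the user side the machinery is driven roughly like so (a sketch assuming this era's API: range_x(), SetComplexityN(), and the Complexity() modifier; BM_VectorPushBack is hypothetical):

#include <vector>

#include "benchmark/benchmark_api.h"

static void BM_VectorPushBack(benchmark::State& state) {
  while (state.KeepRunning()) {
    std::vector<int> v;
    for (int i = 0; i < state.range_x(); ++i) v.push_back(i);
  }
  state.SetComplexityN(state.range_x());  // the N fed into the fit
}
// oAuto tries the oLogN/oN/oNLogN/oNSquared/oNCubed curves above and keeps
// the best fit; passing a BigOFunc lambda instead supplies a custom curve.
BENCHMARK(BM_VectorPushBack)->Range(1 << 10, 1 << 16)->Complexity(benchmark::oAuto);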
@@ -122,14 +122,14 @@ LeastSq MinimalLeastSq(const std::vector<int>& n,
                        const std::vector<double>& time,
                        const BigO complexity) {
   CHECK_EQ(n.size(), time.size());
-  CHECK_GE(n.size(), 2); // Do not compute fitting curve is less than two benchmark runs are given
+  CHECK_GE(n.size(), 2);  // Do not compute fitting curve is less than two
+                          // benchmark runs are given
   CHECK_NE(complexity, oNone);

   LeastSq best_fit;

-  if(complexity == oAuto) {
-    std::vector<BigO> fit_curves = {
-      oLogN, oN, oNLogN, oNSquared, oNCubed };
+  if (complexity == oAuto) {
+    std::vector<BigO> fit_curves = {oLogN, oN, oNLogN, oNSquared, oNCubed};

     // Take o1 as default best fitting curve
     best_fit = MinimalLeastSq(n, time, FittingCurve(o1));
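For reference, the closed form behind a one-parameter fit of $t \approx c\,g(n)$ (standard least squares, not code from this commit): minimizing

    S(c) = \sum_i \bigl(t_i - c\,g(n_i)\bigr)^2

over the coefficient gives

    c = \frac{\sum_i t_i\, g(n_i)}{\sum_i g(n_i)^2},

while the relative-error variant the comment describes, with each residual divided by $t_i$, gives

    c = \frac{\sum_i g(n_i)/t_i}{\sum_i g(n_i)^2/t_i^2}.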
@@ -152,14 +152,13 @@ LeastSq MinimalLeastSq(const std::vector<int>& n,
 }

 std::vector<BenchmarkReporter::Run> ComputeStats(
-    const std::vector<BenchmarkReporter::Run>& reports)
-{
+    const std::vector<BenchmarkReporter::Run>& reports) {
   typedef BenchmarkReporter::Run Run;
   std::vector<Run> results;

-  auto error_count = std::count_if(
-      reports.begin(), reports.end(),
-      [](Run const& run) {return run.error_occurred;});
+  auto error_count =
+      std::count_if(reports.begin(), reports.end(),
+                    [](Run const& run) { return run.error_occurred; });

   if (reports.size() - error_count < 2) {
     // We don't report aggregated data if there was a single run.
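error_occurred is set on the user side, and failed runs are excluded from the aggregates computed here. A sketch with this era's API (the failure condition is a made-up stand-in):

#include "benchmark/benchmark.h"

static void BM_MayFail(benchmark::State& state) {
  bool setup_ok = true;  // stand-in for a real precondition
  if (!setup_ok) {
    state.SkipWithError("setup failed");  // marks the run's error_occurred
    return;
  }
  while (state.KeepRunning()) {
    // measured work
  }
}
BENCHMARK(BM_MayFail);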
@@ -178,12 +177,11 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
   for (Run const& run : reports) {
     CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
     CHECK_EQ(run_iterations, run.iterations);
-    if (run.error_occurred)
-      continue;
+    if (run.error_occurred) continue;
     real_accumulated_time_stat +=
-        Stat1_d(run.real_accumulated_time/run.iterations, run.iterations);
+        Stat1_d(run.real_accumulated_time / run.iterations, run.iterations);
     cpu_accumulated_time_stat +=
-        Stat1_d(run.cpu_accumulated_time/run.iterations, run.iterations);
+        Stat1_d(run.cpu_accumulated_time / run.iterations, run.iterations);
     items_per_second_stat += Stat1_d(run.items_per_second, run.iterations);
     bytes_per_second_stat += Stat1_d(run.bytes_per_second, run.iterations);
   }
@@ -192,10 +190,10 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
   Run mean_data;
   mean_data.benchmark_name = reports[0].benchmark_name + "_mean";
   mean_data.iterations = run_iterations;
-  mean_data.real_accumulated_time = real_accumulated_time_stat.Mean() *
-                                    run_iterations;
-  mean_data.cpu_accumulated_time = cpu_accumulated_time_stat.Mean() *
-                                   run_iterations;
+  mean_data.real_accumulated_time =
+      real_accumulated_time_stat.Mean() * run_iterations;
+  mean_data.cpu_accumulated_time =
+      cpu_accumulated_time_stat.Mean() * run_iterations;
   mean_data.bytes_per_second = bytes_per_second_stat.Mean();
   mean_data.items_per_second = items_per_second_stat.Mean();

@@ -212,10 +210,8 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
   stddev_data.benchmark_name = reports[0].benchmark_name + "_stddev";
   stddev_data.report_label = mean_data.report_label;
   stddev_data.iterations = 0;
-  stddev_data.real_accumulated_time =
-      real_accumulated_time_stat.StdDev();
-  stddev_data.cpu_accumulated_time =
-      cpu_accumulated_time_stat.StdDev();
+  stddev_data.real_accumulated_time = real_accumulated_time_stat.StdDev();
+  stddev_data.cpu_accumulated_time = cpu_accumulated_time_stat.StdDev();
   stddev_data.bytes_per_second = bytes_per_second_stat.StdDev();
   stddev_data.items_per_second = items_per_second_stat.StdDev();

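ComputeStats is what produces the "_mean" and "_stddev" rows named above; they appear once a benchmark runs more than once. A sketch assuming the Repetitions() modifier of this era (BM_Memcpy is hypothetical):

#include <cstring>

#include "benchmark/benchmark.h"

static void BM_Memcpy(benchmark::State& state) {
  char src[1024] = {}, dst[1024];
  while (state.KeepRunning()) std::memcpy(dst, src, sizeof(src));
}
// Reporters then receive the per-repetition rows plus BM_Memcpy_mean and
// BM_Memcpy_stddev aggregate rows built by ComputeStats.
BENCHMARK(BM_Memcpy)->Repetitions(10);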
@@ -225,8 +221,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
 }

 std::vector<BenchmarkReporter::Run> ComputeBigO(
-    const std::vector<BenchmarkReporter::Run>& reports)
-{
+    const std::vector<BenchmarkReporter::Run>& reports) {
   typedef BenchmarkReporter::Run Run;
   std::vector<Run> results;

@@ -240,8 +235,8 @@ std::vector<BenchmarkReporter::Run> ComputeBigO(
   // Populate the accumulators.
   for (const Run& run : reports) {
     n.push_back(run.complexity_n);
-    real_time.push_back(run.real_accumulated_time/run.iterations);
-    cpu_time.push_back(run.cpu_accumulated_time/run.iterations);
+    real_time.push_back(run.real_accumulated_time / run.iterations);
+    cpu_time.push_back(run.cpu_accumulated_time / run.iterations);
   }

   LeastSq result_cpu;
@@ -254,7 +249,8 @@ std::vector<BenchmarkReporter::Run> ComputeBigO(
     result_cpu = MinimalLeastSq(n, cpu_time, reports[0].complexity_lambda);
     result_real = MinimalLeastSq(n, real_time, reports[0].complexity_lambda);
   }
-  std::string benchmark_name = reports[0].benchmark_name.substr(0, reports[0].benchmark_name.find('/'));
+  std::string benchmark_name =
+      reports[0].benchmark_name.substr(0, reports[0].benchmark_name.find('/'));

   // Get the data from the accumulator to BenchmarkReporter::Run's.
   Run big_o;
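The find('/') above strips the size suffix that sized benchmark runs carry, so the fitted aggregate reports under the plain family name. A hypothetical check of just that string step:

#include <cassert>
#include <string>

int main() {
  std::string run_name = "BM_Sort/1024";  // one run of a sized family
  assert(run_name.substr(0, run_name.find('/')) == "BM_Sort");
}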
src/console_reporter.cc

@@ -15,9 +15,9 @@
 #include "benchmark/reporter.h"
 #include "complexity.h"

+#include <algorithm>
 #include <cstdint>
 #include <cstdio>
-#include <algorithm>
 #include <iostream>
 #include <string>
 #include <tuple>
@@ -62,8 +62,8 @@ void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
 void ConsoleReporter::PrintRunData(const Run& result) {
   auto& Out = GetOutputStream();

-  auto name_color = (result.report_big_o || result.report_rms)
-      ? COLOR_BLUE : COLOR_GREEN;
+  auto name_color =
+      (result.report_big_o || result.report_rms) ? COLOR_BLUE : COLOR_GREEN;
   ColorPrintf(Out, name_color, "%-*s ", name_field_width_,
               result.benchmark_name.c_str());

@@ -84,25 +84,25 @@ void ConsoleReporter::PrintRunData(const Run& result) {
   if (result.items_per_second > 0) {
     items = StrCat(" ", HumanReadableNumber(result.items_per_second),
                    " items/s");
-  }
+  }

   const double real_time = result.GetAdjustedRealTime();
   const double cpu_time = result.GetAdjustedCPUTime();

-  if(result.report_big_o) {
+  if (result.report_big_o) {
     std::string big_o = GetBigOString(result.complexity);
-    ColorPrintf(Out, COLOR_YELLOW, "%10.2f %s %10.2f %s ",
-                real_time, big_o.c_str(), cpu_time, big_o.c_str());
-  } else if(result.report_rms) {
-    ColorPrintf(Out, COLOR_YELLOW, "%10.0f %% %10.0f %% ",
-                real_time * 100, cpu_time * 100);
+    ColorPrintf(Out, COLOR_YELLOW, "%10.2f %s %10.2f %s ", real_time,
+                big_o.c_str(), cpu_time, big_o.c_str());
+  } else if (result.report_rms) {
+    ColorPrintf(Out, COLOR_YELLOW, "%10.0f %% %10.0f %% ", real_time * 100,
+                cpu_time * 100);
   } else {
     const char* timeLabel = GetTimeUnitString(result.time_unit);
-    ColorPrintf(Out, COLOR_YELLOW, "%10.0f %s %10.0f %s ",
-                real_time, timeLabel, cpu_time, timeLabel);
+    ColorPrintf(Out, COLOR_YELLOW, "%10.0f %s %10.0f %s ", real_time, timeLabel,
+                cpu_time, timeLabel);
   }

-  if(!result.report_big_o && !result.report_rms) {
+  if (!result.report_big_o && !result.report_rms) {
     ColorPrintf(Out, COLOR_CYAN, "%10lld", result.iterations);
   }

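Reading the format strings together ("%-*s " for the name, "%10.0f %s" per time, "%10lld" for iterations), a normal row, a fitted row, and an RMS row render along these lines (values and names made up for illustration):

BM_Memcpy/1024             250 ns        249 ns    2811096
BM_Sort_BigO              2.33 NlgN      2.33 NlgN
BM_Sort_RMS                  3 %            3 %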
src/csv_reporter.cc

@@ -15,8 +15,8 @@
 #include "benchmark/reporter.h"
 #include "complexity.h"

-#include <cstdint>
 #include <algorithm>
+#include <cstdint>
 #include <iostream>
 #include <string>
 #include <tuple>
@@ -80,7 +80,7 @@ void CSVReporter::PrintRunData(const Run & run) {
   }

   // Do not print iteration on bigO and RMS report
-  if(!run.report_big_o && !run.report_rms) {
+  if (!run.report_big_o && !run.report_rms) {
     Out << run.iterations;
   }
   Out << ",";
@@ -89,9 +89,9 @@ void CSVReporter::PrintRunData(const Run & run) {
   Out << run.GetAdjustedCPUTime() << ",";

   // Do not print timeLabel on bigO and RMS report
-  if(run.report_big_o) {
+  if (run.report_big_o) {
     Out << GetBigOString(run.complexity);
-  } else if(!run.report_rms){
+  } else if (!run.report_rms) {
     Out << GetTimeUnitString(run.time_unit);
   }
   Out << ",";
@@ -111,7 +111,7 @@ void CSVReporter::PrintRunData(const Run & run) {
     ReplaceAll(&label, "\"", "\"\"");
     Out << "\"" << label << "\"";
   }
-  Out << ",,"; // for error_occurred and error_message
+  Out << ",,";  // for error_occurred and error_message
   Out << '\n';
 }

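The label handling above is standard CSV escaping: double any embedded quote, then wrap the whole field in quotes. A self-contained check (this ReplaceAll is a stand-in for the library's internal helper):

#include <cassert>
#include <string>

// Minimal stand-in for the library's internal ReplaceAll helper.
static void ReplaceAll(std::string* s, const std::string& from,
                       const std::string& to) {
  for (std::size_t pos = s->find(from); pos != std::string::npos;
       pos = s->find(from, pos + to.size())) {
    s->replace(pos, from.size(), to);
  }
}

int main() {
  std::string label = "say \"hi\"";
  ReplaceAll(&label, "\"", "\"\"");
  // The field serializes as: "say ""hi"""
  assert("\"" + label + "\"" == "\"say \"\"hi\"\"\"");
}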
src/json_reporter.cc

@@ -15,8 +15,8 @@
 #include "benchmark/reporter.h"
 #include "complexity.h"

-#include <cstdint>
 #include <algorithm>
+#include <cstdint>
 #include <iostream>
 #include <string>
 #include <tuple>
@@ -100,24 +100,24 @@ void JSONReporter::ReportRuns(std::vector<Run> const& reports) {
   first_report_ = false;

   for (auto it = reports.begin(); it != reports.end(); ++it) {
-      out << indent << "{\n";
-      PrintRunData(*it);
-      out << indent << '}';
-      auto it_cp = it;
-      if (++it_cp != reports.end()) {
-          out << ",\n";
-      }
+    out << indent << "{\n";
+    PrintRunData(*it);
+    out << indent << '}';
+    auto it_cp = it;
+    if (++it_cp != reports.end()) {
+      out << ",\n";
+    }
   }
 }

 void JSONReporter::Finalize() {
-    // Close the list of benchmarks and the top level object.
-    GetOutputStream() << "\n  ]\n}\n";
+  // Close the list of benchmarks and the top level object.
+  GetOutputStream() << "\n  ]\n}\n";
 }

 void JSONReporter::PrintRunData(Run const& run) {
-    std::string indent(6, ' ');
-    std::ostream& out = GetOutputStream();
+  std::string indent(6, ' ');
+  std::ostream& out = GetOutputStream();
   out << indent
       << FormatKV("name", run.benchmark_name)
       << ",\n";
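The it_cp copy-and-peek above is one way to separate JSON array elements without emitting a trailing comma; a first-element flag achieves the same. A generic sketch of the technique, separate from the library:

#include <iostream>
#include <vector>

void PrintArray(const std::vector<int>& xs) {
  std::cout << '[';
  bool first = true;
  for (int x : xs) {
    if (!first) std::cout << ',';  // separator before every element but the first
    first = false;
    std::cout << x;
  }
  std::cout << "]\n";  // e.g. [1,2,3]
}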
@@ -129,7 +129,7 @@ void JSONReporter::PrintRunData(Run const& run) {
         << FormatKV("error_message", run.error_message)
         << ",\n";
   }
-  if(!run.report_big_o && !run.report_rms) {
+  if (!run.report_big_o && !run.report_rms) {
     out << indent
         << FormatKV("iterations", run.iterations)
         << ",\n";
@@ -140,14 +140,14 @@ void JSONReporter::PrintRunData(Run const& run) {
         << FormatKV("cpu_time", RoundDouble(run.GetAdjustedCPUTime()));
     out << ",\n" << indent
         << FormatKV("time_unit", GetTimeUnitString(run.time_unit));
-  } else if(run.report_big_o) {
-    out << indent
-        << FormatKV("cpu_coefficient", RoundDouble(run.GetAdjustedCPUTime()))
-        << ",\n";
-    out << indent
-        << FormatKV("real_coefficient", RoundDouble(run.GetAdjustedRealTime()))
-        << ",\n";
-    out << indent
+  } else if (run.report_big_o) {
+    out << indent
+        << FormatKV("cpu_coefficient", RoundDouble(run.GetAdjustedCPUTime()))
+        << ",\n";
+    out << indent
+        << FormatKV("real_coefficient", RoundDouble(run.GetAdjustedRealTime()))
+        << ",\n";
+    out << indent
         << FormatKV("big_o", GetBigOString(run.complexity))
         << ",\n";
     out << indent
@@ -156,20 +156,23 @@ void JSONReporter::PrintRunData(Run const& run) {
     out << indent
         << FormatKV("rms", RoundDouble(run.GetAdjustedCPUTime()*100))
         << '%';
   }
-  if (run.bytes_per_second > 0.0) {
-    out << ",\n" << indent
-        << FormatKV("bytes_per_second", RoundDouble(run.bytes_per_second));
-  }
-  if (run.items_per_second > 0.0) {
-    out << ",\n" << indent
-        << FormatKV("items_per_second", RoundDouble(run.items_per_second));
-  }
-  if (!run.report_label.empty()) {
-    out << ",\n" << indent
-        << FormatKV("label", run.report_label);
-  }
-  out << '\n';
-}
+  if (run.bytes_per_second > 0.0) {
+    out << ",\n"
+        << indent
+        << FormatKV("bytes_per_second", RoundDouble(run.bytes_per_second));
+  }
+  if (run.items_per_second > 0.0) {
+    out << ",\n"
+        << indent
+        << FormatKV("items_per_second", RoundDouble(run.items_per_second));
+  }
+  if (!run.report_label.empty()) {
+    out << ",\n"
+        << indent
+        << FormatKV("label", run.report_label);
+  }
+  out << '\n';
+}

-} // end namespace benchmark
+}  // end namespace benchmark
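Assembled from the FormatKV calls above, one element of the reporter's "benchmarks" array comes out roughly like this (values made up; the keys are the ones written in this file):

      {
        "name": "BM_Memcpy/1024",
        "iterations": 2811096,
        "real_time": 250,
        "cpu_time": 249,
        "time_unit": "ns",
        "bytes_per_second": 4100000000,
        "label": "cached"
      }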