diff --git a/include/benchmark/reporter.h b/include/benchmark/reporter.h
index b66854c0..3d573d83 100644
--- a/include/benchmark/reporter.h
+++ b/include/benchmark/reporter.h
@@ -88,11 +88,24 @@ class ConsoleReporter : public BenchmarkReporter {
  public:
   virtual bool ReportContext(const Context& context);
   virtual void ReportRuns(const std::vector<Run>& reports);
 
- private:
+protected:
   virtual void PrintRunData(const Run& report);
   size_t name_field_width_;
 };
 
+class JSONReporter : public BenchmarkReporter {
+public:
+  JSONReporter() : first_report_(true) {}
+  virtual bool ReportContext(const Context& context);
+  virtual void ReportRuns(const std::vector<Run>& reports);
+  virtual void Finalize();
+
+private:
+  void PrintRunData(const Run& report);
+
+  bool first_report_;
+};
+
 } // end namespace benchmark
 #endif // BENCHMARK_REPORTER_H_
diff --git a/src/benchmark.cc b/src/benchmark.cc
index 69562fff..36d1d841 100644
--- a/src/benchmark.cc
+++ b/src/benchmark.cc
@@ -60,6 +60,10 @@ DEFINE_int32(benchmark_repetitions, 1,
              "The number of runs of each benchmark. If greater than 1, the "
              "mean and standard deviation of the runs will be reported.");
 
+DEFINE_string(benchmark_format, "tabular",
+              "The format to use for console output. Valid values are "
+              "'tabular' or 'json'.");
+
 DEFINE_bool(color_print, true, "Enables colorized logging.");
 
 DEFINE_int32(v, 0, "The level of verbose logging to output");
@@ -807,19 +811,35 @@ void RunMatchingBenchmarks(const std::string& spec,
   }
 }
 
+std::unique_ptr<BenchmarkReporter> GetDefaultReporter() {
+  typedef std::unique_ptr<BenchmarkReporter> PtrType;
+  if (FLAGS_benchmark_format == "tabular") {
+    return PtrType(new ConsoleReporter);
+  }
+  else if (FLAGS_benchmark_format == "json") {
+    return PtrType(new JSONReporter);
+  } else {
+    std::cerr << "Unexpected format: '" << FLAGS_benchmark_format << "'\n";
+    std::exit(1);
+  }
+}
+
 } // end namespace internal
 
 void RunSpecifiedBenchmarks() {
   RunSpecifiedBenchmarks(nullptr);
 }
 
-void RunSpecifiedBenchmarks(BenchmarkReporter* provided_reporter) {
+void RunSpecifiedBenchmarks(BenchmarkReporter* reporter) {
   std::string spec = FLAGS_benchmark_filter;
   if (spec.empty() || spec == "all")
     spec = ".";  // Regexp that matches all benchmarks
-  ConsoleReporter default_reporter;
-  BenchmarkReporter* reporter = provided_reporter ? provided_reporter
-                                                  : &default_reporter;
+
+  std::unique_ptr<BenchmarkReporter> default_reporter;
+  if (!reporter) {
+    default_reporter = internal::GetDefaultReporter();
+    reporter = default_reporter.get();
+  }
   internal::RunMatchingBenchmarks(spec, reporter);
   reporter->Finalize();
 }
@@ -833,6 +853,7 @@ void PrintUsageAndExit() {
           " [--benchmark_iterations=<iterations>]\n"
           " [--benchmark_min_time=<min_time>]\n"
           " [--benchmark_repetitions=<num_repetitions>]\n"
+          " [--benchmark_format=<tabular|json>]\n"
          " [--color_print={true|false}]\n"
          " [--v=<verbosity>]\n");
  exit(0);
@@ -850,6 +871,8 @@ void ParseCommandLineFlags(int* argc, const char** argv) {
                         &FLAGS_benchmark_min_time) ||
         ParseInt32Flag(argv[i], "benchmark_repetitions",
                        &FLAGS_benchmark_repetitions) ||
+        ParseStringFlag(argv[i], "benchmark_format",
+                        &FLAGS_benchmark_format) ||
         ParseBoolFlag(argv[i], "color_print",
                       &FLAGS_color_print) ||
         ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
@@ -861,6 +884,10 @@ void ParseCommandLineFlags(int* argc, const char** argv) {
       PrintUsageAndExit();
     }
   }
+  if (FLAGS_benchmark_format != "tabular" &&
+      FLAGS_benchmark_format != "json") {
+    PrintUsageAndExit();
+  }
 }
 
 } // end namespace internal
diff --git a/src/reporter.cc b/src/reporter.cc
index 27258c22..d97321dc 100644
--- a/src/reporter.cc
+++ b/src/reporter.cc
@@ -16,6 +16,7 @@
 
 #include <cstdlib>
 #include <cstring>
+#include <iostream>
 #include <string>
 #include <vector>
@@ -188,4 +189,145 @@ void ConsoleReporter::PrintRunData(const Run& result) {
           result.report_label.c_str());
 }
 
+namespace {
+
+std::string FormatKV(std::string const& key, std::string const& value) {
+  return StringPrintF("\"%s\": \"%s\"", key.c_str(), value.c_str());
+}
+
+std::string FormatKV(std::string const& key, const char* value) {
+  return StringPrintF("\"%s\": \"%s\"", key.c_str(), value);
+}
+
+std::string FormatKV(std::string const& key, bool value) {
+  return StringPrintF("\"%s\": %s", key.c_str(), value ? "true" : "false");
+}
+
+std::string FormatKV(std::string const& key, int64_t value) {
+  std::stringstream ss;
+  ss << '"' << key << "\": " << value;
+  return ss.str();
+}
+
+std::string FormatKV(std::string const& key, std::size_t value) {
+  std::stringstream ss;
+  ss << '"' << key << "\": " << value;
+  return ss.str();
+}
+
+int64_t RoundDouble(double v) {
+  return static_cast<int64_t>(v + 0.5);
+}
+
+} // end namespace
+
+bool JSONReporter::ReportContext(const Context& context) {
+  std::ostream& out = std::cout;
+
+  out << "{\n";
+  std::string inner_indent(2, ' ');
+
+  // Open context block and print context information.
+  out << inner_indent << "\"context\": {\n";
+  std::string indent(4, ' ');
+
+  int remainder_us;
+  std::string walltime_value = walltime::Print(
+      walltime::Now(), "%Y/%m/%d-%H:%M:%S",
+      true,  // use local timezone
+      &remainder_us);
+  out << indent << FormatKV("date", walltime_value) << ",\n";
+
+  out << indent
+      << FormatKV("num_cpus", static_cast<int64_t>(context.num_cpus))
+      << ",\n";
+  out << indent
+      << FormatKV("mhz_per_cpu", RoundDouble(context.mhz_per_cpu))
+      << ",\n";
+  out << indent
+      << FormatKV("cpu_scaling_enabled", context.cpu_scaling_enabled)
+      << ",\n";
+
+#if defined(NDEBUG)
+  const char* build_type = "release";
+#else
+  const char* build_type = "debug";
+#endif
+  out << indent << FormatKV("build_type", build_type) << "\n";
+
+  // Close context block and open the list of benchmarks.
+  out << inner_indent << "},\n";
+  out << inner_indent << "\"benchmarks\": [\n";
+  return true;
+}
+
+void JSONReporter::ReportRuns(std::vector<Run> const& reports) {
+  if (reports.empty()) {
+    return;
+  }
+  std::string indent(4, ' ');
+  std::ostream& out = std::cout;
+  if (!first_report_) {
+    out << ",\n";
+  }
+  first_report_ = false;
+
+  std::vector<Run> reports_cp = reports;
+  if (reports.size() >= 2) {
+    Run mean_data;
+    Run stddev_data;
+    ComputeStats(reports, &mean_data, &stddev_data);
+    reports_cp.push_back(mean_data);
+    reports_cp.push_back(stddev_data);
+  }
+  for (auto it = reports_cp.begin(); it != reports_cp.end(); ++it) {
+    out << indent << "{\n";
+    PrintRunData(*it);
+    out << indent << '}';
+    auto it_cp = it;
+    if (++it_cp != reports_cp.end()) {
+      out << ',';
+    }
+  }
+}
+
+void JSONReporter::Finalize() {
+  // Close the list of benchmarks and the top level object.
+  std::cout << "\n  ]\n}\n";
+}
+
+void JSONReporter::PrintRunData(Run const& run) {
+  double const multiplier = 1e9;  // nano second multiplier
+  double cpu_time = run.cpu_accumulated_time * multiplier;
+  double real_time = run.real_accumulated_time * multiplier;
+  if (run.iterations != 0) {
+    real_time = real_time / static_cast<double>(run.iterations);
+    cpu_time = cpu_time / static_cast<double>(run.iterations);
+  }
+
+  std::string indent(6, ' ');
+  std::ostream& out = std::cout;
+  out << indent
+      << FormatKV("name", run.benchmark_name)
+      << ",\n";
+  out << indent
+      << FormatKV("iterations", run.iterations)
+      << ",\n";
+  out << indent
+      << FormatKV("real_time", RoundDouble(real_time))
+      << ",\n";
+  out << indent
+      << FormatKV("cpu_time", RoundDouble(cpu_time));
+  if (run.bytes_per_second > 0.0) {
+    out << ",\n" << indent
+        << FormatKV("bytes_per_second", RoundDouble(run.bytes_per_second));
+  }
+  if (run.items_per_second > 0.0) {
+    out << ",\n" << indent
+        << FormatKV("items_per_second", RoundDouble(run.items_per_second));
+  }
+  if (!run.report_label.empty()) {
+    out << ",\n" << indent
+        << FormatKV("label", run.report_label);
+  }
+  out << '\n';
+}
+
 } // end namespace benchmark