Mirror of https://github.com/google/benchmark.git
commit b5dd1506d8

include/benchmark/reporter.h

@@ -109,5 +109,14 @@ private:
   bool first_report_;
 };
 
+class CSVReporter : public BenchmarkReporter {
+ public:
+  virtual bool ReportContext(const Context& context);
+  virtual void ReportRuns(const std::vector<Run>& reports);
+
+ private:
+  void PrintRunData(const Run& report);
+};
+
 } // end namespace benchmark
 
 #endif // BENCHMARK_REPORTER_H_

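A minimal sketch of selecting the new reporter programmatically instead of through the --benchmark_format flag. It assumes the RunSpecifiedBenchmarks() overload that accepts a reporter pointer and the Initialize(int*, const char**) signature of this era of the library; both have varied across releases, so treat this as illustrative only, not part of the diff.

// sketch.cc: run one benchmark and emit CSV via the new CSVReporter.
#include <string>

#include "benchmark/benchmark.h"   // pulls in benchmark/reporter.h in this era
#include "benchmark/reporter.h"

static void BM_StringCopy(benchmark::State& state) {
  std::string src = "hello";
  while (state.KeepRunning()) {   // KeepRunning() loop style of this version
    std::string copy(src);
    (void)copy;                   // avoid unused-variable warnings
  }
}
BENCHMARK(BM_StringCopy);

int main(int argc, const char* argv[]) {
  benchmark::Initialize(&argc, argv);       // newer versions take char** instead
  benchmark::CSVReporter csv_reporter;      // rows go to stdout as CSV
  benchmark::RunSpecifiedBenchmarks(&csv_reporter);  // assumed overload
  return 0;
}
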
src/CMakeLists.txt

@@ -2,8 +2,9 @@
 include_directories(${PROJECT_SOURCE_DIR}/src)
 
 # Define the source files
-set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc" "log.cc"
-    "json_reporter.cc" "reporter.cc" "sleep.cc" "string_util.cc"
+set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc"
+    "console_reporter.cc" "csv_reporter.cc" "json_reporter.cc"
+    "log.cc" "reporter.cc" "sleep.cc" "string_util.cc"
     "sysinfo.cc" "walltime.cc")
 # Determine the correct regular expression engine to use
 if(HAVE_STD_REGEX)

src/benchmark.cc

@@ -58,7 +58,7 @@ DEFINE_int32(benchmark_repetitions, 1,
 
 DEFINE_string(benchmark_format, "tabular",
               "The format to use for console output. Valid values are "
-              "'tabular' or 'json'.");
+              "'tabular', 'json', or 'csv'.");
 
 DEFINE_bool(color_print, true, "Enables colorized logging.");
 
@@ -804,9 +804,10 @@ std::unique_ptr<BenchmarkReporter> GetDefaultReporter() {
   typedef std::unique_ptr<BenchmarkReporter> PtrType;
   if (FLAGS_benchmark_format == "tabular") {
     return PtrType(new ConsoleReporter);
-  }
-  else if (FLAGS_benchmark_format == "json") {
+  } else if (FLAGS_benchmark_format == "json") {
     return PtrType(new JSONReporter);
+  } else if (FLAGS_benchmark_format == "csv") {
+    return PtrType(new CSVReporter);
   } else {
     std::cerr << "Unexpected format: '" << FLAGS_benchmark_format << "'\n";
     std::exit(1);

@@ -841,7 +842,7 @@ void PrintUsageAndExit() {
           " [--benchmark_filter=<regex>]\n"
           " [--benchmark_min_time=<min_time>]\n"
           " [--benchmark_repetitions=<num_repetitions>]\n"
-          " [--benchmark_format=<tabular|json>]\n"
+          " [--benchmark_format=<tabular|json|csv>]\n"
           " [--color_print={true|false}]\n"
           " [--v=<verbosity>]\n");
   exit(0);

@@ -871,7 +872,8 @@ void ParseCommandLineFlags(int* argc, const char** argv) {
     }
   }
   if (FLAGS_benchmark_format != "tabular" &&
-      FLAGS_benchmark_format != "json") {
+      FLAGS_benchmark_format != "json" &&
+      FLAGS_benchmark_format != "csv") {
     PrintUsageAndExit();
   }
 }

src/console_reporter.cc  (new file, 118 lines)

@@ -0,0 +1,118 @@
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "benchmark/reporter.h"

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

#include "check.h"
#include "colorprint.h"
#include "string_util.h"
#include "walltime.h"

namespace benchmark {

bool ConsoleReporter::ReportContext(const Context& context) {
  name_field_width_ = context.name_field_width;

  std::cerr << "Run on (" << context.num_cpus << " X " << context.mhz_per_cpu
            << " MHz CPU " << ((context.num_cpus > 1) ? "s" : "") << "\n";

  std::cerr << LocalDateTimeString() << "\n";

  if (context.cpu_scaling_enabled) {
    std::cerr << "***WARNING*** CPU scaling is enabled, the benchmark "
                 "real time measurements may be noisy and will incure extra "
                 "overhead.\n";
  }

#ifndef NDEBUG
  std::cerr << "Build Type: DEBUG\n";
#endif

  int output_width =
      fprintf(stdout,
              "%-*s %10s %10s %10s\n",
              static_cast<int>(name_field_width_),
              "Benchmark",
              "Time(ns)", "CPU(ns)",
              "Iterations");
  std::cout << std::string(output_width - 1, '-') << "\n";

  return true;
}

void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
  if (reports.empty()) {
    return;
  }

  for (Run const& run : reports) {
    CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
    PrintRunData(run);
  }

  if (reports.size() < 2) {
    // We don't report aggregated data if there was a single run.
    return;
  }

  Run mean_data;
  Run stddev_data;
  BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);

  // Output using PrintRun.
  PrintRunData(mean_data);
  PrintRunData(stddev_data);
}

void ConsoleReporter::PrintRunData(const Run& result) {
  // Format bytes per second
  std::string rate;
  if (result.bytes_per_second > 0) {
    rate = StrCat(" ", HumanReadableNumber(result.bytes_per_second), "B/s");
  }

  // Format items per second
  std::string items;
  if (result.items_per_second > 0) {
    items = StrCat(" ", HumanReadableNumber(result.items_per_second),
                   " items/s");
  }

  double const multiplier = 1e9;  // nano second multiplier
  ColorPrintf(COLOR_GREEN, "%-*s ",
              name_field_width_, result.benchmark_name.c_str());
  if (result.iterations == 0) {
    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
                result.real_accumulated_time * multiplier,
                result.cpu_accumulated_time * multiplier);
  } else {
    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
                (result.real_accumulated_time * multiplier) /
                    (static_cast<double>(result.iterations)),
                (result.cpu_accumulated_time * multiplier) /
                    (static_cast<double>(result.iterations)));
  }
  ColorPrintf(COLOR_CYAN, "%10lld", result.iterations);
  ColorPrintf(COLOR_DEFAULT, "%*s %*s %s\n",
              13, rate.c_str(),
              18, items.c_str(),
              result.report_label.c_str());
}

}  // end namespace benchmark

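Both reporters call BenchmarkReporter::ComputeStats (defined in reporter.cc, not shown in this diff) to append mean and stddev rows when a benchmark is repeated. The following self-contained sketch only illustrates that kind of aggregation; it is not the library's implementation, and the repetition times are made up.

#include <cmath>
#include <cstdio>
#include <vector>

int main() {
  // Hypothetical per-repetition real times, in nanoseconds per iteration.
  std::vector<double> real_times = {105.0, 98.0, 101.0, 103.0};

  double sum = 0.0;
  for (double t : real_times) sum += t;
  const double mean = sum / real_times.size();

  double sq_sum = 0.0;
  for (double t : real_times) sq_sum += (t - mean) * (t - mean);
  const double stddev = std::sqrt(sq_sum / real_times.size());

  std::printf("mean: %.1f ns  stddev: %.1f ns\n", mean, stddev);
  return 0;
}
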
src/csv_reporter.cc  (new file, 104 lines)

@@ -0,0 +1,104 @@
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "benchmark/reporter.h"

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

#include "string_util.h"
#include "walltime.h"

// File format reference: http://edoceo.com/utilitas/csv-file-format.

namespace benchmark {

bool CSVReporter::ReportContext(const Context& context) {
  std::cerr << "Run on (" << context.num_cpus << " X " << context.mhz_per_cpu
            << " MHz CPU " << ((context.num_cpus > 1) ? "s" : "") << "\n";

  std::cerr << LocalDateTimeString() << "\n";

  if (context.cpu_scaling_enabled) {
    std::cerr << "***WARNING*** CPU scaling is enabled, the benchmark "
                 "real time measurements may be noisy and will incure extra "
                 "overhead.\n";
  }

#ifndef NDEBUG
  std::cerr << "Build Type: DEBUG\n";
#endif
  std::cout << "name,iterations,real_time,cpu_time,bytes_per_second,"
               "items_per_second,label\n";
  return true;
}

void CSVReporter::ReportRuns(std::vector<Run> const& reports) {
  if (reports.empty()) {
    return;
  }

  std::vector<Run> reports_cp = reports;
  if (reports.size() >= 2) {
    Run mean_data;
    Run stddev_data;
    BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);
    reports_cp.push_back(mean_data);
    reports_cp.push_back(stddev_data);
  }
  for (auto it = reports_cp.begin(); it != reports_cp.end(); ++it) {
    PrintRunData(*it);
  }
}

void CSVReporter::PrintRunData(Run const& run) {
  double const multiplier = 1e9;  // nano second multiplier
  double cpu_time = run.cpu_accumulated_time * multiplier;
  double real_time = run.real_accumulated_time * multiplier;
  if (run.iterations != 0) {
    real_time = real_time / static_cast<double>(run.iterations);
    cpu_time = cpu_time / static_cast<double>(run.iterations);
  }

  // Field with embedded double-quote characters must be doubled and the field
  // delimited with double-quotes.
  std::string name = run.benchmark_name;
  ReplaceAll(&name, "\"", "\"\"");
  std::cout << "\"" << name << "\",";

  std::cout << run.iterations << ",";
  std::cout << real_time << ",";
  std::cout << cpu_time << ",";

  if (run.bytes_per_second > 0.0) {
    std::cout << run.bytes_per_second;
  }
  std::cout << ",";
  if (run.items_per_second > 0.0) {
    std::cout << run.items_per_second;
  }
  std::cout << ",";
  if (!run.report_label.empty()) {
    // Field with embedded double-quote characters must be doubled and the field
    // delimited with double-quotes.
    std::string label = run.report_label;
    ReplaceAll(&label, "\"", "\"\"");
    std::cout << "\"" << label << "\"";
  }
  std::cout << '\n';
}

}  // end namespace benchmark

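The resulting stream begins with the header row hard-coded in ReportContext above; each later line is one run in the same field order, with fields left empty when no counter was set. The sample row below uses purely hypothetical values and is only meant to show the shape of the output:

name,iterations,real_time,cpu_time,bytes_per_second,items_per_second,label
"BM_StringCopy",1000000,42,41,,,
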
src/reporter.cc

@@ -14,17 +14,11 @@
 
 #include "benchmark/reporter.h"
 
-#include <cstdio>
 #include <cstdlib>
-#include <iostream>
-#include <string>
 #include <vector>
 
 #include "check.h"
-#include "colorprint.h"
 #include "stat.h"
-#include "string_util.h"
-#include "walltime.h"
 
 namespace benchmark {
 
@@ -89,98 +83,4 @@ void BenchmarkReporter::Finalize() {
 BenchmarkReporter::~BenchmarkReporter() {
 }
 
-bool ConsoleReporter::ReportContext(const Context& context) {
-  name_field_width_ = context.name_field_width;
-
-  fprintf(stdout,
-          "Run on (%d X %0.0f MHz CPU%s)\n",
-          context.num_cpus,
-          context.mhz_per_cpu,
-          (context.num_cpus > 1) ? "s" : "");
-
-  std::string walltime_str = LocalDateTimeString();
-  fprintf(stdout, "%s\n", walltime_str.c_str());
-
-  if (context.cpu_scaling_enabled) {
-    fprintf(stdout, "***WARNING*** CPU scaling is enabled, the benchmark "
-                    "real time measurements may be noisy and will incure extra "
-                    "overhead.\n");
-  }
-
-#ifndef NDEBUG
-  fprintf(stdout, "Build Type: DEBUG\n");
-#endif
-
-  int output_width =
-      fprintf(stdout,
-              "%-*s %10s %10s %10s\n",
-              static_cast<int>(name_field_width_),
-              "Benchmark",
-              "Time(ns)", "CPU(ns)",
-              "Iterations");
-  fprintf(stdout, "%s\n", std::string(output_width - 1, '-').c_str());
-
-  return true;
-}
-
-void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
-  if (reports.empty()) {
-    return;
-  }
-
-  for (Run const& run : reports) {
-    CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
-    PrintRunData(run);
-  }
-
-  if (reports.size() < 2) {
-    // We don't report aggregated data if there was a single run.
-    return;
-  }
-
-  Run mean_data;
-  Run stddev_data;
-  BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);
-
-  // Output using PrintRun.
-  PrintRunData(mean_data);
-  PrintRunData(stddev_data);
-  fprintf(stdout, "\n");
-}
-
-void ConsoleReporter::PrintRunData(const Run& result) {
-  // Format bytes per second
-  std::string rate;
-  if (result.bytes_per_second > 0) {
-    rate = StrCat(" ", HumanReadableNumber(result.bytes_per_second), "B/s");
-  }
-
-  // Format items per second
-  std::string items;
-  if (result.items_per_second > 0) {
-    items = StrCat(" ", HumanReadableNumber(result.items_per_second),
-                   " items/s");
-  }
-
-  double const multiplier = 1e9;  // nano second multiplier
-  ColorPrintf(COLOR_GREEN, "%-*s ",
-              name_field_width_, result.benchmark_name.c_str());
-  if (result.iterations == 0) {
-    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
-                result.real_accumulated_time * multiplier,
-                result.cpu_accumulated_time * multiplier);
-  } else {
-    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
-                (result.real_accumulated_time * multiplier) /
-                    (static_cast<double>(result.iterations)),
-                (result.cpu_accumulated_time * multiplier) /
-                    (static_cast<double>(result.iterations)));
-  }
-  ColorPrintf(COLOR_CYAN, "%10lld", result.iterations);
-  ColorPrintf(COLOR_DEFAULT, "%*s %*s %s\n",
-              13, rate.c_str(),
-              18, items.c_str(),
-              result.report_label.c_str());
-}
-
 } // end namespace benchmark

src/string_util.cc

@@ -154,4 +154,13 @@ std::string StringPrintF(const char* format, ...)
   return tmp;
 }
 
+void ReplaceAll(std::string* str, const std::string& from,
+                const std::string& to) {
+  std::size_t start = 0;
+  while((start = str->find(from, start)) != std::string::npos) {
+    str->replace(start, from.length(), to);
+    start += to.length();
+  }
+}
+
 } // end namespace benchmark

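A short, self-contained sketch of the CSV quoting rule this helper enables in csv_reporter.cc: embedded double quotes are doubled, then the field is wrapped in quotes. ReplaceAll is copied from the definition above so the example compiles on its own; the benchmark name is made up.

#include <cstddef>
#include <iostream>
#include <string>

static void ReplaceAll(std::string* str, const std::string& from,
                       const std::string& to) {
  std::size_t start = 0;
  while ((start = str->find(from, start)) != std::string::npos) {
    str->replace(start, from.length(), to);
    start += to.length();
  }
}

int main() {
  std::string name = "BM_parse/\"quoted\" input";  // hypothetical benchmark name
  ReplaceAll(&name, "\"", "\"\"");                 // double every embedded quote
  std::cout << "\"" << name << "\"\n";             // prints: "BM_parse/""quoted"" input"
  return 0;
}
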
src/string_util.h

@@ -35,6 +35,9 @@ inline std::string StrCat(Args&&... args)
   return ss.str();
 }
 
+void ReplaceAll(std::string* str, const std::string& from,
+                const std::string& to);
+
 } // end namespace benchmark
 
 #endif // BENCHMARK_STRING_UTIL_H_