mirror of https://github.com/google/benchmark.git
clang-tidy: readability-redundant and performance (#1298)

* clang-tidy: readability-redundant-*
* clang-tidy: performance-*

parent fd258bbd13
commit ab867074da

.clang-tidy
@@ -1,6 +1,6 @@
 ---
-Checks: 'clang-analyzer-*'
-WarningsAsErrors: 'clang-analyzer-*'
+Checks: 'clang-analyzer-*,readability-redundant-*,performance-*'
+WarningsAsErrors: 'clang-analyzer-*,readability-redundant-*,performance-*'
 HeaderFilterRegex: '.*'
 AnalyzeTemporaryDtors: false
 FormatStyle: none
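The two newly enabled families cover clang-tidy's readability-redundant-* checks (redundant access specifiers, member initializers, declarations, string .c_str() calls, and similar) and its performance-* checks (unnecessary parameter copies, loop-variable copies, and similar). As a rough illustration (the snippet is hypothetical, not from the repository), code like the following would now be flagged:

    #include <string>
    #include <vector>

    class Widget {
     public:
      // performance-unnecessary-value-param: 'name' is only read, so the
      // by-value copy is wasted; the fix is a const reference (or a move).
      explicit Widget(std::string name) : name_(name), tag_() {}
      // readability-redundant-member-init: 'tag_()' above restates the
      // default constructor and can simply be dropped.

     private:
      std::string name_;
      std::string tag_;
    };

    int main() {
      std::vector<std::string> items = {"alpha", "beta"};
      // performance-for-range-copy: 'item' copies each string only to read it.
      for (auto item : items) {
        (void)item.size();
      }
      return 0;
    }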

include/benchmark/benchmark.h
@@ -774,7 +774,7 @@ class State {
   bool finished_;
   bool error_occurred_;
 
- private:  // items we don't need on the first cache line
+  // items we don't need on the first cache line
   std::vector<int64_t> range_;
 
   int64_t complexity_n_;
@@ -1054,7 +1054,8 @@ class Benchmark {
   Benchmark* Complexity(BigOFunc* complexity);
 
   // Add this statistics to be computed over all the values of benchmark run
-  Benchmark* ComputeStatistics(std::string name, StatisticsFunc* statistics,
+  Benchmark* ComputeStatistics(const std::string& name,
+                               StatisticsFunc* statistics,
                                StatisticUnit unit = kTime);
 
   // Support for running multiple copies of the same benchmark concurrently
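The declaration change above is clang-tidy's performance-unnecessary-value-param fix: a parameter that is only read should be taken by const reference rather than copied on every call. A minimal, self-contained sketch of the before/after (names are hypothetical):

    #include <string>

    // Before: 'label' is copied on every call even though it is only read.
    size_t LengthByValue(std::string label) { return label.size(); }

    // After: same behavior, no copy.
    size_t LengthByRef(const std::string& label) { return label.size(); }

    int main() {
      std::string s = "BM_example/threads:1";
      return LengthByValue(s) == LengthByRef(s) ? 0 : 1;
    }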
@@ -1169,8 +1170,7 @@ class LambdaBenchmark : public Benchmark {
 
   LambdaBenchmark(LambdaBenchmark const&) = delete;
 
- private:
-  template <class Lam>
+  template <class Lam>  // NOLINTNEXTLINE(readability-redundant-declaration)
   friend Benchmark* ::benchmark::RegisterBenchmark(const char*, Lam&&);
 
   Lambda lambda_;
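Here one warning is suppressed rather than fixed: the friend re-declaration of RegisterBenchmark is intentional, so the commit attaches a NOLINTNEXTLINE comment, which tells clang-tidy to skip the named check on the line that follows (a trailing comment on the `template` line applies to the friend declaration below it). A generic sketch of the suppression mechanism, using hypothetical code:

    // NOLINT comments are read by clang-tidy; NOLINTNEXTLINE(<check>)
    // silences exactly one named check on the following line.

    void Helper();  // first declaration

    // NOLINTNEXTLINE(readability-redundant-declaration)
    void Helper();  // deliberate re-declaration; the warning is suppressed

    void Helper() {}

    int main() {
      Helper();
      return 0;
    }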
@@ -1338,7 +1338,7 @@ class Fixture : public internal::Benchmark {
 #define BENCHMARK_PRIVATE_DECLARE_F(BaseClass, Method) \
   class BaseClass##_##Method##_Benchmark : public BaseClass { \
    public: \
-    BaseClass##_##Method##_Benchmark() : BaseClass() { \
+    BaseClass##_##Method##_Benchmark() { \
      this->SetName(#BaseClass "/" #Method); \
    } \
 \
@@ -1349,7 +1349,7 @@ class Fixture : public internal::Benchmark {
 #define BENCHMARK_TEMPLATE1_PRIVATE_DECLARE_F(BaseClass, Method, a) \
   class BaseClass##_##Method##_Benchmark : public BaseClass<a> { \
    public: \
-    BaseClass##_##Method##_Benchmark() : BaseClass<a>() { \
+    BaseClass##_##Method##_Benchmark() { \
      this->SetName(#BaseClass "<" #a ">/" #Method); \
    } \
 \
@@ -1360,7 +1360,7 @@ class Fixture : public internal::Benchmark {
 #define BENCHMARK_TEMPLATE2_PRIVATE_DECLARE_F(BaseClass, Method, a, b) \
   class BaseClass##_##Method##_Benchmark : public BaseClass<a, b> { \
    public: \
-    BaseClass##_##Method##_Benchmark() : BaseClass<a, b>() { \
+    BaseClass##_##Method##_Benchmark() { \
      this->SetName(#BaseClass "<" #a "," #b ">/" #Method); \
    } \
 \
@@ -1372,7 +1372,7 @@ class Fixture : public internal::Benchmark {
 #define BENCHMARK_TEMPLATE_PRIVATE_DECLARE_F(BaseClass, Method, ...) \
   class BaseClass##_##Method##_Benchmark : public BaseClass<__VA_ARGS__> { \
    public: \
-    BaseClass##_##Method##_Benchmark() : BaseClass<__VA_ARGS__>() { \
+    BaseClass##_##Method##_Benchmark() { \
      this->SetName(#BaseClass "<" #__VA_ARGS__ ">/" #Method); \
    } \
 \
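All four fixture macros above receive the same readability-redundant-member-init fix: an explicit `: BaseClass()` in a constructor's initializer list only restates what default construction already does, so it can be removed without changing behavior. A minimal sketch:

    #include <iostream>

    struct Base {
      Base() { std::cout << "Base()\n"; }
    };

    struct Before : Base {
      Before() : Base() {}  // flagged: the base is default-constructed anyway
    };

    struct After : Base {
      After() {}  // identical behavior, no redundant initializer
    };

    int main() {
      Before b;  // prints "Base()"
      After a;   // also prints "Base()"
      return 0;
    }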
@@ -1539,7 +1539,6 @@ class BenchmarkReporter {
         complexity_n(0),
         report_big_o(false),
         report_rms(false),
-        counters(),
         memory_result(NULL),
         allocs_per_iter(0.0) {}
 
@@ -1676,10 +1675,7 @@ class ConsoleReporter : public BenchmarkReporter {
     OO_Defaults = OO_ColorTabular
   };
   explicit ConsoleReporter(OutputOptions opts_ = OO_Defaults)
-      : output_options_(opts_),
-        name_field_width_(0),
-        prev_counters_(),
-        printed_header_(false) {}
+      : output_options_(opts_), name_field_width_(0), printed_header_(false) {}
 
   virtual bool ReportContext(const Context& context) BENCHMARK_OVERRIDE;
   virtual void ReportRuns(const std::vector<Run>& reports) BENCHMARK_OVERRIDE;

src/benchmark.cc
@@ -145,7 +145,6 @@ State::State(IterationCount max_iters, const std::vector<int64_t>& ranges,
       error_occurred_(false),
       range_(ranges),
       complexity_n_(0),
-      counters(),
       thread_index_(thread_i),
       threads_(n_threads),
       timer_(timer),
@@ -434,7 +433,7 @@ size_t RunSpecifiedBenchmarks() {
 }
 
 size_t RunSpecifiedBenchmarks(std::string spec) {
-  return RunSpecifiedBenchmarks(nullptr, nullptr, spec);
+  return RunSpecifiedBenchmarks(nullptr, nullptr, std::move(spec));
 }
 
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
@@ -444,7 +443,7 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
 
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
                               std::string spec) {
-  return RunSpecifiedBenchmarks(display_reporter, nullptr, spec);
+  return RunSpecifiedBenchmarks(display_reporter, nullptr, std::move(spec));
 }
 
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
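Both forwarding overloads take `spec` by value, and the call is the parameter's last use, so copying it into the next overload wasted an allocation; moving it is the standard companion fix to performance-unnecessary-value-param when a by-value parameter is handed onward. A self-contained sketch (names are hypothetical):

    #include <iostream>
    #include <string>
    #include <utility>

    void Consume(std::string spec) { std::cout << spec << "\n"; }

    void Forward(std::string spec) {
      // Last use of 'spec': transfer the buffer instead of copying it.
      Consume(std::move(spec));
    }

    int main() {
      Forward("BM_spec.*");
      return 0;
    }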

src/benchmark_register.cc
@@ -413,7 +413,7 @@ Benchmark* Benchmark::Complexity(BigOFunc* complexity) {
   return this;
 }
 
-Benchmark* Benchmark::ComputeStatistics(std::string name,
+Benchmark* Benchmark::ComputeStatistics(const std::string& name,
                                         StatisticsFunc* statistics,
                                         StatisticUnit unit) {
   statistics_.emplace_back(name, statistics, unit);

src/thread_manager.h
@@ -36,7 +36,6 @@ class ThreadManager {
                         [this]() { return alive_threads_ == 0; });
   }
 
- public:
   struct Result {
     IterationCount iterations = 0;
     double real_time_used = 0;
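This is the same readability-redundant-access-specifiers fix seen in `State` and `LambdaBenchmark` above: a specifier that restates the access level already in effect is dropped. Minimal sketch:

    class Manager {
     public:
      int Start() { return 0; }

     public:  // flagged: the access level is already public
      struct Result {
        long iterations = 0;
      };
    };

    int main() {
      Manager m;
      Manager::Result r;
      return m.Start() + static_cast<int>(r.iterations);
    }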

test/args_product_test.cc
@@ -37,7 +37,7 @@ class ArgsProductFixture : public ::benchmark::Fixture {
   virtual ~ArgsProductFixture() {
     if (actualValues != expectedValues) {
       std::cout << "EXPECTED\n";
-      for (auto v : expectedValues) {
+      for (const auto& v : expectedValues) {
         std::cout << "{";
         for (int64_t iv : v) {
           std::cout << iv << ", ";
@@ -45,7 +45,7 @@ class ArgsProductFixture : public ::benchmark::Fixture {
         std::cout << "}\n";
       }
       std::cout << "ACTUAL\n";
-      for (auto v : actualValues) {
+      for (const auto& v : actualValues) {
         std::cout << "{";
         for (int64_t iv : v) {
           std::cout << iv << ", ";
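`expectedValues` and `actualValues` hold `std::vector<int64_t>` elements here, so the old `for (auto v : ...)` deep-copied an entire inner vector on every iteration just to print it; binding a const reference is the performance-for-range-copy fix. Sketch:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    int main() {
      std::vector<std::vector<int64_t>> values = {{1, 2}, {3, 4}};

      // Flagged: each iteration copies a whole inner vector.
      for (auto v : values) {
        std::cout << v.size() << "\n";
      }

      // Fixed: the loop variable aliases the element instead.
      for (const auto& v : values) {
        std::cout << v.size() << "\n";
      }
      return 0;
    }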

test/benchmark_random_interleaving_gtest.cc
@@ -111,8 +111,8 @@ TEST_F(BenchmarkTest, Match1WithRandomInterleaving) {
     std::vector<std::string> interleaving;
     interleaving.push_back(queue->Get());
     interleaving.push_back(queue->Get());
-    element_count[interleaving[0].c_str()]++;
-    element_count[interleaving[1].c_str()]++;
+    element_count[interleaving[0]]++;
+    element_count[interleaving[1]]++;
     interleaving_count[StrFormat("%s,%s", interleaving[0].c_str(),
                                  interleaving[1].c_str())]++;
   }
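The map is keyed by `std::string`, so indexing it with `interleaving[0].c_str()` converted the string to `const char*` only for the map to construct a fresh `std::string` from it; readability-redundant-string-cstr removes the round trip. The `.c_str()` calls feeding `StrFormat` stay, since a printf-style formatter genuinely needs a `const char*`. Sketch:

    #include <map>
    #include <string>

    int main() {
      std::map<std::string, int> counts;
      std::string key = "BM_Match1/8";

      counts[key.c_str()]++;  // flagged: char* -> temporary std::string
      counts[key]++;          // fixed: the string is used directly

      return counts[key] == 2 ? 0 : 1;
    }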

test/complexity_test.cc
@@ -13,9 +13,10 @@ namespace {
 #define ADD_COMPLEXITY_CASES(...) \
   int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)
 
-int AddComplexityTest(std::string test_name, std::string big_o_test_name,
-                      std::string rms_test_name, std::string big_o,
-                      int family_index) {
+int AddComplexityTest(const std::string &test_name,
+                      const std::string &big_o_test_name,
+                      const std::string &rms_test_name,
+                      const std::string &big_o, int family_index) {
   SetSubstitutions({{"%name", test_name},
                     {"%bigo_name", big_o_test_name},
                     {"%rms_name", rms_test_name},

test/multiple_ranges_test.cc
@@ -42,7 +42,7 @@ class MultipleRangesFixture : public ::benchmark::Fixture {
   virtual ~MultipleRangesFixture() {
     if (actualValues != expectedValues) {
       std::cout << "EXPECTED\n";
-      for (auto v : expectedValues) {
+      for (const auto& v : expectedValues) {
         std::cout << "{";
         for (int64_t iv : v) {
           std::cout << iv << ", ";
@@ -50,7 +50,7 @@ class MultipleRangesFixture : public ::benchmark::Fixture {
         std::cout << "}\n";
       }
       std::cout << "ACTUAL\n";
-      for (auto v : actualValues) {
+      for (const auto& v : actualValues) {
         std::cout << "{";
         for (int64_t iv : v) {
           std::cout << iv << ", ";

test/output_test.h
@@ -85,7 +85,7 @@ std::string GetFileReporterOutput(int argc, char* argv[]);
 struct Results;
 typedef std::function<void(Results const&)> ResultsCheckFn;
 
-size_t AddChecker(const char* bm_name_pattern, ResultsCheckFn fn);
+size_t AddChecker(const char* bm_name_pattern, const ResultsCheckFn& fn);
 
 // Class holding the results of a benchmark.
 // It is passed in calls to checker functions.

test/output_test_helper.cc
@@ -141,7 +141,7 @@ void CheckCases(TestCaseList const& checks, std::stringstream& output) {
 class TestReporter : public benchmark::BenchmarkReporter {
  public:
   TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
-      : reporters_(reps) {}
+      : reporters_(std::move(reps)) {}
 
   virtual bool ReportContext(const Context& context) BENCHMARK_OVERRIDE {
     bool last_ret = false;
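`reps` stays a by-value parameter but is now moved into the member rather than copied. For a sink parameter (one the callee stores), by-value plus std::move lets callers choose between copying and moving in, and clang-tidy only asks that the final copy become a move. Sketch (names are hypothetical):

    #include <string>
    #include <utility>
    #include <vector>

    class Registry {
     public:
      // Sink parameter: take by value, then move into the member. Callers
      // that pass an rvalue pay for no copy at all.
      explicit Registry(std::vector<std::string> names)
          : names_(std::move(names)) {}

      size_t size() const { return names_.size(); }

     private:
      std::vector<std::string> names_;
    };

    int main() {
      Registry r({"BM_a", "BM_b"});
      return r.size() == 2 ? 0 : 1;
    }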
@@ -183,7 +183,7 @@ class ResultsChecker {
  public:
   struct PatternAndFn : public TestCase {  // reusing TestCase for its regexes
     PatternAndFn(const std::string& rx, ResultsCheckFn fn_)
-        : TestCase(rx), fn(fn_) {}
+        : TestCase(rx), fn(std::move(fn_)) {}
     ResultsCheckFn fn;
   };
 
@@ -191,7 +191,7 @@ class ResultsChecker {
   std::vector<Results> results;
   std::vector<std::string> field_names;
 
-  void Add(const std::string& entry_pattern, ResultsCheckFn fn);
+  void Add(const std::string& entry_pattern, const ResultsCheckFn& fn);
 
   void CheckResults(std::stringstream& output);
 
@@ -210,7 +210,8 @@ ResultsChecker& GetResultsChecker() {
 }
 
 // add a results checker for a benchmark
-void ResultsChecker::Add(const std::string& entry_pattern, ResultsCheckFn fn) {
+void ResultsChecker::Add(const std::string& entry_pattern,
+                         const ResultsCheckFn& fn) {
   check_patterns.emplace_back(entry_pattern, fn);
 }
 
@@ -299,7 +300,7 @@ std::vector<std::string> ResultsChecker::SplitCsv_(const std::string& line) {
 
 }  // end namespace internal
 
-size_t AddChecker(const char* bm_name, ResultsCheckFn fn) {
+size_t AddChecker(const char* bm_name, const ResultsCheckFn& fn) {
   auto& rc = internal::GetResultsChecker();
   rc.Add(bm_name, fn);
   return rc.results.size();
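`ResultsCheckFn` is a `std::function`, which may own captured state, so passing it by value copies that state at every call site; these signature changes switch to const reference. Sketch:

    #include <functional>
    #include <string>

    struct Results {
      std::string name;
    };

    using ResultsCheckFn = std::function<void(Results const&)>;

    // By value: the std::function (and whatever it captured) is copied.
    void RunCheckByValue(ResultsCheckFn fn, const Results& r) { fn(r); }

    // By const reference: no copy, identical call syntax.
    void RunCheckByRef(const ResultsCheckFn& fn, const Results& r) { fn(r); }

    int main() {
      Results r{"BM_example"};
      ResultsCheckFn fn = [](Results const& res) { (void)res.name.size(); };
      RunCheckByValue(fn, r);
      RunCheckByRef(fn, r);
      return 0;
    }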

test/register_benchmark_test.cc
@@ -45,7 +45,7 @@ struct TestCase {
 std::vector<TestCase> ExpectedResults;
 
 int AddCases(std::initializer_list<TestCase> const& v) {
-  for (auto N : v) {
+  for (const auto& N : v) {
     ExpectedResults.push_back(N);
   }
   return 0;

test/skip_with_error_test.cc
@@ -119,12 +119,13 @@ ADD_CASES("BM_error_during_running", {{"/1/threads:1", true, "error message"},
 
 void BM_error_during_running_ranged_for(benchmark::State& state) {
   assert(state.max_iterations > 3 && "test requires at least a few iterations");
-  [[maybe_unused]] bool first_iter = true;
+  bool first_iter = true;
   // NOTE: Users should not write the for loop explicitly.
   for (auto It = state.begin(), End = state.end(); It != End; ++It) {
     if (state.range(0) == 1) {
       assert(first_iter);
       first_iter = false;
+      (void)first_iter;
       state.SkipWithError("error message");
       // Test the unfortunate but documented behavior that the ranged-for loop
       // doesn't automatically terminate when SkipWithError is set.
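With NDEBUG defined the asserts disappear, leaving `first_iter` written but never read, which some compilers warn about. The change trades the C++17 `[[maybe_unused]]` attribute for a `(void)` cast after the final write, expressing the same intent while staying valid in pre-C++17 build modes (that motivation is inferred, not stated in the commit). Sketch:

    #include <cassert>

    int main() {
      bool first_iter = true;
      assert(first_iter);  // compiled out entirely when NDEBUG is defined
      first_iter = false;
      (void)first_iter;    // reads the value, silencing
                           // unused-but-set-variable warnings in C++11
      return 0;
    }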