format tests with clang-format (#1282)

Dominic Hamon 2021-11-10 16:22:31 +00:00 committed by GitHub
parent fcef4fb669
commit c07a498924
21 changed files with 126 additions and 142 deletions
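For orientation, here is a minimal sketch of the two transformations that dominate this diff: moving "benchmark/benchmark.h" below the standard headers into its own include group, and normalizing brace and whitespace placement. The snippet is illustrative only, not taken from the commit; BM_Example is a hypothetical benchmark name.

// Before formatting (illustrative):
//   #include "benchmark/benchmark.h"
//   #include <vector>
//   void BM_Example(benchmark::State& state) { while(state.KeepRunning()) {} }

// After clang-format with the project's Google-derived style (assumed):
#include <vector>

#include "benchmark/benchmark.h"

static void BM_Example(benchmark::State& state) {
  while (state.KeepRunning()) {
  }
}
BENCHMARK(BM_Example);
BENCHMARK_MAIN();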


@@ -1,10 +1,10 @@
-#include "benchmark/benchmark.h"
-
 #include <cassert>
 #include <iostream>
 #include <set>
 #include <vector>
 
+#include "benchmark/benchmark.h"
+
 class ArgsProductFixture : public ::benchmark::Fixture {
  public:
   ArgsProductFixture()


@@ -96,7 +96,6 @@ void BM_empty_stop_start(benchmark::State& state) {
 BENCHMARK(BM_empty_stop_start);
 BENCHMARK(BM_empty_stop_start)->ThreadPerCpu();
 
-
 void BM_KeepRunning(benchmark::State& state) {
   benchmark::IterationCount iter_count = 0;
   assert(iter_count == state.iterations());
@@ -171,8 +170,10 @@ BENCHMARK(BM_TwoTemplateFunc<double, int>)->Arg(1);
 
 // Ensure that StateIterator provides all the necessary typedefs required to
 // instantiate std::iterator_traits.
-static_assert(std::is_same<
-  typename std::iterator_traits<benchmark::State::StateIterator>::value_type,
-  typename benchmark::State::StateIterator::value_type>::value, "");
+static_assert(
+    std::is_same<typename std::iterator_traits<
+                     benchmark::State::StateIterator>::value_type,
+                 typename benchmark::State::StateIterator::value_type>::value,
+    "");
 
 BENCHMARK_MAIN();
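For background (not part of the diff): std::iterator_traits only works out of the box when the iterator exposes the five standard member typedefs, which is exactly what the static_assert above verifies for State::StateIterator. A minimal, self-contained sketch with a toy iterator:

#include <cstddef>
#include <iterator>
#include <type_traits>

// Toy iterator exposing the five member typedefs std::iterator_traits
// expects to find (value_type, difference_type, pointer, reference,
// iterator_category).
struct ToyIter {
  using value_type = int;
  using difference_type = std::ptrdiff_t;
  using pointer = int*;
  using reference = int&;
  using iterator_category = std::forward_iterator_tag;
};

static_assert(
    std::is_same<std::iterator_traits<ToyIter>::value_type, int>::value,
    "iterator_traits forwards the member typedef");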


@@ -93,8 +93,9 @@ static void BM_SetInsert(benchmark::State& state) {
   state.SetBytesProcessed(state.iterations() * state.range(1) * sizeof(int));
 }
 
-// Test many inserts at once to reduce the total iterations needed. Otherwise, the slower,
-// non-timed part of each iteration will make the benchmark take forever.
+// Test many inserts at once to reduce the total iterations needed. Otherwise,
+// the slower, non-timed part of each iteration will make the benchmark take
+// forever.
 BENCHMARK(BM_SetInsert)->Ranges({{1 << 10, 8 << 10}, {128, 512}});
 
 template <typename Container,
@@ -214,7 +215,8 @@ BENCHMARK_CAPTURE(BM_with_args, string_and_pair_test, std::string("abc"),
                   std::pair<int, double>(42, 3.8));
 
 void BM_non_template_args(benchmark::State& state, int, double) {
-  while(state.KeepRunning()) {}
+  while (state.KeepRunning()) {
+  }
 }
 BENCHMARK_CAPTURE(BM_non_template_args, basic_test, 0, 0);


@@ -9,7 +9,6 @@ extern "C" {
 extern int ExternInt;
 extern int ExternInt2;
 extern int ExternInt3;
-
 }
 
 // CHECK-LABEL: test_basic:


@@ -4,6 +4,7 @@
 #include <cmath>
 #include <cstdlib>
 #include <vector>
+
 #include "benchmark/benchmark.h"
 #include "output_test.h"

@@ -65,7 +66,7 @@ int AddComplexityTest(std::string test_name, std::string big_o_test_name,
 // --------------------------- Testing BigO O(1) --------------------------- //
 // ========================================================================= //
 
-void BM_Complexity_O1(benchmark::State& state) {
+void BM_Complexity_O1(benchmark::State &state) {
   for (auto _ : state) {
     for (int i = 0; i < 1024; ++i) {
       benchmark::DoNotOptimize(&i);

@@ -114,7 +115,7 @@ std::vector<int> ConstructRandomVector(int64_t size) {
   return v;
 }
 
-void BM_Complexity_O_N(benchmark::State& state) {
+void BM_Complexity_O_N(benchmark::State &state) {
   auto v = ConstructRandomVector(state.range(0));
   // Test worst case scenario (item not in vector)
   const int64_t item_not_in_vector = state.range(0) * 2;

@@ -156,7 +157,7 @@ ADD_COMPLEXITY_CASES(n_test_name, big_o_n_test_name, rms_o_n_test_name,
 // ------------------------- Testing BigO O(N*lgN) ------------------------- //
 // ========================================================================= //
 
-static void BM_Complexity_O_N_log_N(benchmark::State& state) {
+static void BM_Complexity_O_N_log_N(benchmark::State &state) {
   auto v = ConstructRandomVector(state.range(0));
   for (auto _ : state) {
     std::sort(v.begin(), v.end());

@@ -199,7 +200,7 @@ ADD_COMPLEXITY_CASES(n_lg_n_test_name, big_o_n_lg_n_test_name,
 // -------- Testing formatting of Complexity with captured args ------------ //
 // ========================================================================= //
 
-void BM_ComplexityCaptureArgs(benchmark::State& state, int n) {
+void BM_ComplexityCaptureArgs(benchmark::State &state, int n) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
     benchmark::DoNotOptimize(state.iterations());


@@ -44,8 +44,7 @@ BENCHMARK_TEMPLATE(BM_template1, long);
 BENCHMARK_TEMPLATE1(BM_template1, int);
 
 template <class T>
-struct BM_Fixture : public ::benchmark::Fixture {
-};
+struct BM_Fixture : public ::benchmark::Fixture {};
 
 BENCHMARK_TEMPLATE_F(BM_Fixture, BM_template1, long)(benchmark::State& state) {
   BM_empty(state);

@@ -55,8 +54,8 @@ BENCHMARK_TEMPLATE1_F(BM_Fixture, BM_template2, int)(benchmark::State& state) {
 }
 
 void BM_counters(benchmark::State& state) {
-    BM_empty(state);
-    state.counters["Foo"] = 2;
+  BM_empty(state);
+  state.counters["Foo"] = 2;
 }
 BENCHMARK(BM_counters);


@@ -26,7 +26,8 @@ void TestHandler() {
 }
 
 void try_invalid_pause_resume(benchmark::State& state) {
-#if !defined(TEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS) && !defined(TEST_HAS_NO_EXCEPTIONS)
+#if !defined(TEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS) && \
+    !defined(TEST_HAS_NO_EXCEPTIONS)
   try {
     state.PauseTiming();
     std::abort();

@@ -57,13 +58,12 @@ void BM_diagnostic_test(benchmark::State& state) {
 }
 BENCHMARK(BM_diagnostic_test);
 
 void BM_diagnostic_test_keep_running(benchmark::State& state) {
   static bool called_once = false;
   if (called_once == false) try_invalid_pause_resume(state);
 
-  while(state.KeepRunning()) {
+  while (state.KeepRunning()) {
     benchmark::DoNotOptimize(state.iterations());
   }


@@ -15,7 +15,7 @@ inline int Add42(int x) { return x + 42; }
 struct NotTriviallyCopyable {
   NotTriviallyCopyable();
   explicit NotTriviallyCopyable(int x) : value(x) {}
-  NotTriviallyCopyable(NotTriviallyCopyable const&);
+  NotTriviallyCopyable(NotTriviallyCopyable const &);
   int value;
 };
 
@@ -23,7 +23,6 @@ struct Large {
   int value;
   int data[2];
 };
-
 }
 
 // CHECK-LABEL: test_with_rvalue:
 extern "C" void test_with_rvalue() {

@@ -118,8 +117,7 @@ extern "C" int test_div_by_two(int input) {
 // CHECK-LABEL: test_inc_integer:
 extern "C" int test_inc_integer() {
   int x = 0;
-  for (int i=0; i < 5; ++i)
-    benchmark::DoNotOptimize(++x);
+  for (int i = 0; i < 5; ++i) benchmark::DoNotOptimize(++x);
   // CHECK: movl $1, [[DEST:.*]]
   // CHECK: {{(addl \$1,|incl)}} [[DEST]]
   // CHECK: {{(addl \$1,|incl)}} [[DEST]]

@@ -147,7 +145,7 @@ extern "C" void test_pointer_const_lvalue() {
   // CHECK-CLANG: movq %rax, -{{[0-9]+}}(%[[REG:[a-z]+]])
   // CHECK: ret
   int x = 42;
-  int * const xp = &x;
+  int *const xp = &x;
   benchmark::DoNotOptimize(xp);
 }


@@ -1,27 +1,28 @@
-#include "benchmark/benchmark.h"
-
 #include <cstdint>
 
+#include "benchmark/benchmark.h"
+
 namespace {
 #if defined(__GNUC__)
 std::uint64_t double_up(const std::uint64_t x) __attribute__((const));
 #endif
 std::uint64_t double_up(const std::uint64_t x) { return x * 2; }
-}
+}  // namespace
 
 // Using DoNotOptimize on types like BitRef seem to cause a lot of problems
 // with the inline assembly on both GCC and Clang.
 struct BitRef {
   int index;
-  unsigned char &byte;
+  unsigned char& byte;
 
-public:
+ public:
   static BitRef Make() {
     static unsigned char arr[2] = {};
     BitRef b(1, arr[0]);
     return b;
   }
 
-private:
+ private:
   BitRef(int i, unsigned char& b) : index(i), byte(b) {}
 };
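As the comment in this hunk records, DoNotOptimize is unreliable with proxy types like BitRef. For contrast, a hedged sketch of the ordinary, supported usage on a plain value; BM_PlainValue is an illustrative name, not part of the commit:

#include "benchmark/benchmark.h"

static void BM_PlainValue(benchmark::State& state) {
  int x = 0;
  for (auto _ : state) {
    // A plain int lvalue is the intended input: DoNotOptimize forces the
    // value to be materialized, without the reference-binding pitfalls a
    // proxy object such as BitRef runs into.
    benchmark::DoNotOptimize(x += 1);
  }
}
BENCHMARK(BM_PlainValue);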


@@ -70,7 +70,7 @@ static void BM_FooBa(benchmark::State& state) {
 }
 BENCHMARK(BM_FooBa);
 
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
   bool list_only = false;
   for (int i = 0; i < argc; ++i)
     list_only |= std::string(argv[i]).find("--benchmark_list_tests") !=


@@ -1,9 +1,9 @@
-#include "benchmark/benchmark.h"
-
 #include <cassert>
 #include <memory>
 
+#include "benchmark/benchmark.h"
+
 #define FIXTURE_BECHMARK_NAME MyFixture
 
 class FIXTURE_BECHMARK_NAME : public ::benchmark::Fixture {

@@ -27,7 +27,7 @@ class FIXTURE_BECHMARK_NAME : public ::benchmark::Fixture {
   std::unique_ptr<int> data;
 };
 
-BENCHMARK_F(FIXTURE_BECHMARK_NAME, Foo)(benchmark::State &st) {
+BENCHMARK_F(FIXTURE_BECHMARK_NAME, Foo)(benchmark::State& st) {
   assert(data.get() != nullptr);
   assert(*data == 42);
   for (auto _ : st) {


@@ -3,6 +3,7 @@
 #include <chrono>
 #include <thread>
+
 #include "../src/timers.h"
 #include "benchmark/benchmark.h"
 #include "output_test.h"


@@ -1,8 +1,8 @@
-#include "benchmark/benchmark.h"
-
 #include <cstdlib>
 #include <map>
 
+#include "benchmark/benchmark.h"
+
 namespace {
 
 std::map<int, int> ConstructRandomMap(int size) {


@@ -1,10 +1,10 @@
-#include "benchmark/benchmark.h"
-
 #include <cassert>
 #include <iostream>
 #include <set>
 #include <vector>
 
+#include "benchmark/benchmark.h"
+
 class MultipleRangesFixture : public ::benchmark::Fixture {
  public:
   MultipleRangesFixture()


@@ -1,7 +1,8 @@
-#include "benchmark/benchmark.h"
 #include <chrono>
 #include <thread>
 
+#include "benchmark/benchmark.h"
+
 #if defined(NDEBUG)
 #undef NDEBUG
 #endif

@@ -65,11 +66,9 @@ void BM_explicit_iteration_count(benchmark::State& state) {
   // Test that the requested iteration count is respected.
   assert(state.max_iterations == 42);
   size_t actual_iterations = 0;
-  for (auto _ : state)
-    ++actual_iterations;
+  for (auto _ : state) ++actual_iterations;
   assert(state.iterations() == state.max_iterations);
   assert(state.iterations() == 42);
 }
 BENCHMARK(BM_explicit_iteration_count)->Iterations(42);
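For reference, Iterations(42) as used above pins the benchmark to exactly that many iterations instead of letting the library pick a count adaptively. A minimal sketch of the same idea; BM_PinnedCount is a hypothetical name, and the assert assumes NDEBUG is undefined, as this test file arranges:

#include <cassert>

#include "benchmark/benchmark.h"

static void BM_PinnedCount(benchmark::State& state) {
  benchmark::IterationCount seen = 0;
  for (auto _ : state) ++seen;
  // With ->Iterations(42), the loop body runs exactly max_iterations times,
  // so the hand-counted total agrees with the framework's.
  assert(seen == state.max_iterations);
}
BENCHMARK(BM_PinnedCount)->Iterations(42);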


@@ -113,9 +113,7 @@ struct Results {
     return NumIterations() * GetTime(kRealTime);
   }
 
   // get the cpu_time duration of the benchmark in seconds
-  double DurationCPUTime() const {
-    return NumIterations() * GetTime(kCpuTime);
-  }
+  double DurationCPUTime() const { return NumIterations() * GetTime(kCpuTime); }
 
   // get the string for a result by name, or nullptr if the name
   // is not found


@@ -317,9 +317,7 @@ int Results::NumThreads() const {
   return num;
 }
 
-double Results::NumIterations() const {
-  return GetAs<double>("iterations");
-}
+double Results::NumIterations() const { return GetAs<double>("iterations"); }
 
 double Results::GetTime(BenchmarkTime which) const {
   BM_CHECK(which == kCpuTime || which == kRealTime);

@@ -468,9 +466,8 @@ static char RandomHexChar() {
 static std::string GetRandomFileName() {
   std::string model = "test.%%%%%%";
-  for (auto & ch : model) {
-    if (ch == '%')
-      ch = RandomHexChar();
+  for (auto& ch : model) {
+    if (ch == '%') ch = RandomHexChar();
   }
   return model;
 }

@@ -487,8 +484,7 @@ static std::string GetTempFileName() {
   int retries = 3;
   while (--retries) {
     std::string name = GetRandomFileName();
-    if (!FileExists(name))
-      return name;
+    if (!FileExists(name)) return name;
   }
   std::cerr << "Failed to create unique temporary file name" << std::endl;
   std::abort();


@@ -5,7 +5,7 @@
 
 #ifndef GTEST_SKIP
 struct MsgHandler {
-  void operator=(std::ostream&){}
+  void operator=(std::ostream&) {}
 };
 #define GTEST_SKIP() return MsgHandler() = std::cout
 #endif


@@ -81,16 +81,14 @@ int main(int argc, char** argv) {
   TestReporter test_reporter;
   const char* const spec = "BM_Chosen";
   const size_t returned_count =
-      benchmark::RunSpecifiedBenchmarks(&test_reporter,
-                                        spec);
+      benchmark::RunSpecifiedBenchmarks(&test_reporter, spec);
   assert(returned_count == 1);
   const std::vector<std::string> matched_functions =
       test_reporter.GetMatchedFunctions();
   assert(matched_functions.size() == 1);
   if (strcmp(spec, matched_functions.front().c_str()) != 0) {
-    std::cerr
-        << "Expected benchmark [" << spec << "] to run, but got ["
-        << matched_functions.front() << "]\n";
+    std::cerr << "Expected benchmark [" << spec << "] to run, but got ["
+              << matched_functions.front() << "]\n";
     return 2;
   }
   return 0;


@@ -2,8 +2,8 @@
 // statistics_test - Unit tests for src/statistics.cc
 //===---------------------------------------------------------------------===//
 
-#include "../src/string_util.h"
 #include "../src/internal_macros.h"
+#include "../src/string_util.h"
 #include "gtest/gtest.h"
 
 namespace {

@@ -32,7 +32,8 @@ TEST(StringUtilTest, stoul) {
 #elif ULONG_MAX == 0xFFFFFFFFFFFFFFFFul
   {
     size_t pos = 0;
-    EXPECT_EQ(0xFFFFFFFFFFFFFFFFul, benchmark::stoul("18446744073709551615", &pos));
+    EXPECT_EQ(0xFFFFFFFFFFFFFFFFul,
+              benchmark::stoul("18446744073709551615", &pos));
     EXPECT_EQ(20ul, pos);
   }
 #endif
@@ -62,91 +63,81 @@ TEST(StringUtilTest, stoul) {
     EXPECT_EQ(4ul, pos);
   }
 #ifndef BENCHMARK_HAS_NO_EXCEPTIONS
-  {
-    ASSERT_THROW(benchmark::stoul("this is a test"), std::invalid_argument);
-  }
+  { ASSERT_THROW(benchmark::stoul("this is a test"), std::invalid_argument); }
 #endif
 }
 
-TEST(StringUtilTest, stoi) {
-  {
-    size_t pos = 0;
-    EXPECT_EQ(0, benchmark::stoi("0", &pos));
-    EXPECT_EQ(1ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(-17, benchmark::stoi("-17", &pos));
-    EXPECT_EQ(3ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(1357, benchmark::stoi("1357", &pos));
-    EXPECT_EQ(4ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(10, benchmark::stoi("1010", &pos, 2));
-    EXPECT_EQ(4ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(520, benchmark::stoi("1010", &pos, 8));
-    EXPECT_EQ(4ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(1010, benchmark::stoi("1010", &pos, 10));
-    EXPECT_EQ(4ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(4112, benchmark::stoi("1010", &pos, 16));
-    EXPECT_EQ(4ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(0xBEEF, benchmark::stoi("BEEF", &pos, 16));
-    EXPECT_EQ(4ul, pos);
-  }
+TEST(StringUtilTest, stoi){{size_t pos = 0;
+EXPECT_EQ(0, benchmark::stoi("0", &pos));
+EXPECT_EQ(1ul, pos);
+}  // namespace
+{
+  size_t pos = 0;
+  EXPECT_EQ(-17, benchmark::stoi("-17", &pos));
+  EXPECT_EQ(3ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(1357, benchmark::stoi("1357", &pos));
+  EXPECT_EQ(4ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(10, benchmark::stoi("1010", &pos, 2));
+  EXPECT_EQ(4ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(520, benchmark::stoi("1010", &pos, 8));
+  EXPECT_EQ(4ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(1010, benchmark::stoi("1010", &pos, 10));
+  EXPECT_EQ(4ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(4112, benchmark::stoi("1010", &pos, 16));
+  EXPECT_EQ(4ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(0xBEEF, benchmark::stoi("BEEF", &pos, 16));
+  EXPECT_EQ(4ul, pos);
+}
 #ifndef BENCHMARK_HAS_NO_EXCEPTIONS
-  {
-    ASSERT_THROW(benchmark::stoi("this is a test"), std::invalid_argument);
-  }
+  { ASSERT_THROW(benchmark::stoi("this is a test"), std::invalid_argument); }
 #endif
 }
 
-TEST(StringUtilTest, stod) {
-  {
-    size_t pos = 0;
-    EXPECT_EQ(0.0, benchmark::stod("0", &pos));
-    EXPECT_EQ(1ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(-84.0, benchmark::stod("-84", &pos));
-    EXPECT_EQ(3ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(1234.0, benchmark::stod("1234", &pos));
-    EXPECT_EQ(4ul, pos);
-  }
-  {
-    size_t pos = 0;
-    EXPECT_EQ(1.5, benchmark::stod("1.5", &pos));
-    EXPECT_EQ(3ul, pos);
-  }
-  {
-    size_t pos = 0;
-    /* Note: exactly representable as double */
-    EXPECT_EQ(-1.25e+9, benchmark::stod("-1.25e+9", &pos));
-    EXPECT_EQ(8ul, pos);
-  }
+TEST(StringUtilTest, stod){{size_t pos = 0;
+EXPECT_EQ(0.0, benchmark::stod("0", &pos));
+EXPECT_EQ(1ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(-84.0, benchmark::stod("-84", &pos));
+  EXPECT_EQ(3ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(1234.0, benchmark::stod("1234", &pos));
+  EXPECT_EQ(4ul, pos);
+}
+{
+  size_t pos = 0;
+  EXPECT_EQ(1.5, benchmark::stod("1.5", &pos));
+  EXPECT_EQ(3ul, pos);
+}
+{
+  size_t pos = 0;
+  /* Note: exactly representable as double */
+  EXPECT_EQ(-1.25e+9, benchmark::stod("-1.25e+9", &pos));
+  EXPECT_EQ(8ul, pos);
+}
 #ifndef BENCHMARK_HAS_NO_EXCEPTIONS
-  {
-    ASSERT_THROW(benchmark::stod("this is a test"), std::invalid_argument);
-  }
+  { ASSERT_THROW(benchmark::stod("this is a test"), std::invalid_argument); }
 #endif
 }


@@ -1,9 +1,9 @@
-#include "benchmark/benchmark.h"
-
 #include <cassert>
 #include <memory>
 
+#include "benchmark/benchmark.h"
+
 template <typename T>
 class MyFixture : public ::benchmark::Fixture {
  public: