#undef NDEBUG
#include <cassert>
#include <vector>

#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "benchmark/benchmark.h"

namespace {
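
// Reporter that forwards everything to the ConsoleReporter it derives from,
// but also records every reported Run so main() can check them against
// ExpectedResults afterwards.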
class TestReporter : public benchmark::ConsoleReporter {
 public:
  virtual bool ReportContext(const Context& context) BENCHMARK_OVERRIDE {
    return ConsoleReporter::ReportContext(context);
  }

  virtual void ReportRuns(const std::vector<Run>& report) BENCHMARK_OVERRIDE {
    all_runs_.insert(all_runs_.end(), begin(report), end(report));
    ConsoleReporter::ReportRuns(report);
  }

  TestReporter() {}
  virtual ~TestReporter() {}

  mutable std::vector<Run> all_runs_;
};
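
// A run we expect the reporter to see: the benchmark's name, whether the run
// should carry an error, and the exact error message.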
struct TestCase {
  std::string name;
  bool error_occurred;
  std::string error_message;

  typedef benchmark::BenchmarkReporter::Run Run;

  void CheckRun(Run const& run) const {
    BM_CHECK(name == run.benchmark_name())
        << "expected " << name << " got " << run.benchmark_name();
    BM_CHECK(error_occurred == run.error_occurred);
    BM_CHECK(error_message == run.error_message);

    if (error_occurred) {
      // BM_CHECK(run.iterations == 0);
    } else {
      BM_CHECK(run.iterations != 0);
    }
  }
};
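
// Expected results for every registered benchmark, in registration order;
// AddCases() prepends the benchmark's base name to each suffix.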
std::vector<TestCase> ExpectedResults;

int AddCases(const char* base_name, std::initializer_list<TestCase> const& v) {
  for (auto TC : v) {
    TC.name = base_name + TC.name;
    ExpectedResults.push_back(std::move(TC));
  }
  return 0;
}

#define CONCAT(x, y) CONCAT2(x, y)
#define CONCAT2(x, y) x##y
#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
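
// For illustration, a hypothetical use such as
//   ADD_CASES("BM_foo", {{"", true, "msg"}});
// on line 42 expands to
//   int dummy42 = AddCases("BM_foo", {{"", true, "msg"}});
// so the AddCases() call runs during static initialization.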

}  // end namespace
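
// Each BM_error_* benchmark below calls SkipWithError() at a different point
// in the benchmark lifecycle. Here it is called without ever entering a
// running loop.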
void BM_error_no_running(benchmark::State& state) {
  state.SkipWithError("error message");
}
BENCHMARK(BM_error_no_running);
ADD_CASES("BM_error_no_running", {{"", true, "error message"}});
void BM_error_before_running(benchmark::State& state) {
  state.SkipWithError("error message");
  while (state.KeepRunning()) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running);
ADD_CASES("BM_error_before_running", {{"", true, "error message"}});
void BM_error_before_running_batch(benchmark::State& state) {
  state.SkipWithError("error message");
  while (state.KeepRunningBatch(17)) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running_batch);
ADD_CASES("BM_error_before_running_batch", {{"", true, "error message"}});
void BM_error_before_running_range_for(benchmark::State& state) {
  state.SkipWithError("error message");
  for (auto _ : state) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running_range_for);
ADD_CASES("BM_error_before_running_range_for", {{"", true, "error message"}});
void BM_error_during_running(benchmark::State& state) {
  bool first_iter = true;
  while (state.KeepRunning()) {
    if (state.range(0) == 1 && state.thread_index <= (state.threads / 2)) {
      assert(first_iter);
      first_iter = false;
      state.SkipWithError("error message");
    } else {
      state.PauseTiming();
      state.ResumeTiming();
    }
  }
}
BENCHMARK(BM_error_during_running)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES("BM_error_during_running", {{"/1/threads:1", true, "error message"},
                                      {"/1/threads:2", true, "error message"},
                                      {"/1/threads:4", true, "error message"},
                                      {"/1/threads:8", true, "error message"},
                                      {"/2/threads:1", false, ""},
                                      {"/2/threads:2", false, ""},
                                      {"/2/threads:4", false, ""},
                                      {"/2/threads:8", false, ""}});
void BM_error_during_running_ranged_for(benchmark::State& state) {
  assert(state.max_iterations > 3 && "test requires at least a few iterations");
  bool first_iter = true;
  // NOTE: Users should not write the for loop explicitly.
  for (auto It = state.begin(), End = state.end(); It != End; ++It) {
    if (state.range(0) == 1) {
      assert(first_iter);
      first_iter = false;
      state.SkipWithError("error message");
      // Test the unfortunate but documented behavior that the ranged-for loop
      // doesn't automatically terminate when SkipWithError is set.
      assert(++It != End);
      break;  // Required behavior
    }
  }
}
BENCHMARK(BM_error_during_running_ranged_for)->Arg(1)->Arg(2)->Iterations(5);
ADD_CASES("BM_error_during_running_ranged_for",
          {{"/1/iterations:5", true, "error message"},
           {"/2/iterations:5", false, ""}});
void BM_error_after_running(benchmark::State& state) {
  for (auto _ : state) {
    benchmark::DoNotOptimize(state.iterations());
  }
  if (state.thread_index <= (state.threads / 2))
    state.SkipWithError("error message");
}
BENCHMARK(BM_error_after_running)->ThreadRange(1, 8);
ADD_CASES("BM_error_after_running", {{"/threads:1", true, "error message"},
                                     {"/threads:2", true, "error message"},
                                     {"/threads:4", true, "error message"},
                                     {"/threads:8", true, "error message"}});
void BM_error_while_paused(benchmark::State& state) {
  bool first_iter = true;
  while (state.KeepRunning()) {
    if (state.range(0) == 1 && state.thread_index <= (state.threads / 2)) {
      assert(first_iter);
      first_iter = false;
      state.PauseTiming();
      state.SkipWithError("error message");
    } else {
      state.PauseTiming();
      state.ResumeTiming();
    }
  }
}
BENCHMARK(BM_error_while_paused)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES("BM_error_while_paused", {{"/1/threads:1", true, "error message"},
                                    {"/1/threads:2", true, "error message"},
                                    {"/1/threads:4", true, "error message"},
                                    {"/1/threads:8", true, "error message"},
                                    {"/2/threads:1", false, ""},
                                    {"/2/threads:2", false, ""},
                                    {"/2/threads:4", false, ""},
                                    {"/2/threads:8", false, ""}});
int main(int argc, char* argv[]) {
  benchmark::Initialize(&argc, argv);

  TestReporter test_reporter;
  benchmark::RunSpecifiedBenchmarks(&test_reporter);

  typedef benchmark::BenchmarkReporter::Run Run;
  auto EB = ExpectedResults.begin();

  for (Run const& run : test_reporter.all_runs_) {
    assert(EB != ExpectedResults.end());
    EB->CheckRun(run);
    ++EB;
  }
  assert(EB == ExpectedResults.end());

  return 0;
}