#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <limits>
#include <string>
#include <vector>

#include "benchmark/benchmark.h"

// Tests that the benchmark-spec value set via FLAGS_benchmark_filter can be
// overridden by the argument passed to RunSpecifiedBenchmarks(...).
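//
// Note (assumption about how the test is driven): this binary is expected to
// be invoked with --benchmark_filter=BM_NotChosen on the command line (main()
// asserts this below); RunSpecifiedBenchmarks() is then called with the spec
// "BM_Chosen", which should take precedence over the flag.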

namespace {

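// Reporter that forwards everything to ConsoleReporter and additionally
// records the function name of every benchmark run it is asked to report.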
class TestReporter : public benchmark::ConsoleReporter {
 public:
  bool ReportContext(const Context& context) override {
    return ConsoleReporter::ReportContext(context);
  }

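  // Each ReportRuns() call here should cover exactly one benchmark run;
  // remember its name so that main() can verify which benchmark executed.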
  void ReportRuns(const std::vector<Run>& report) override {
    assert(report.size() == 1);
    matched_functions.push_back(report[0].run_name.function_name);
    ConsoleReporter::ReportRuns(report);
  }

  TestReporter() {}

  ~TestReporter() override {}

  const std::vector<std::string>& GetMatchedFunctions() const {
    return matched_functions;
  }

 private:
  std::vector<std::string> matched_functions;
};

}  // end namespace

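// BM_NotChosen matches the command-line filter but not the spec passed to
// RunSpecifiedBenchmarks(), so it must never execute; fail loudly if it does.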
static void BM_NotChosen(benchmark::State& state) {
  assert(false && "SHOULD NOT BE CALLED");
  for (auto _ : state) {
  }
}
BENCHMARK(BM_NotChosen);

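// BM_Chosen is the benchmark selected by the spec argument below; its body is
// deliberately empty because the test only cares about which benchmark ran,
// not how it performed.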
static void BM_Chosen(benchmark::State& state) {
  for (auto _ : state) {
  }
}
BENCHMARK(BM_Chosen);

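// main() performs three checks, each with its own non-zero exit code on
// failure:
//   1) GetBenchmarkFilter() reflects the --benchmark_filter flag from argv,
//   2) RunSpecifiedBenchmarks(&reporter, "BM_Chosen") overrides the flag and
//      runs only BM_Chosen,
//   3) SetBenchmarkFilter() updates the value GetBenchmarkFilter() returns.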
int main(int argc, char** argv) {
  const std::string flag = "BM_NotChosen";

  // Verify that argv specifies --benchmark_filter=BM_NotChosen.
  bool found = false;
  for (int i = 0; i < argc; ++i) {
    if (strcmp("--benchmark_filter=BM_NotChosen", argv[i]) == 0) {
      found = true;
      break;
    }
  }
  assert(found);

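  // Initialize() parses the recognized --benchmark_filter flag out of argv,
  // making its value available via GetBenchmarkFilter().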
  benchmark::Initialize(&argc, argv);

  // Check that the current flag value is reported accurately via the
  // GetBenchmarkFilter() function.
  if (flag != benchmark::GetBenchmarkFilter()) {
    std::cerr << "Flag mismatch: GetBenchmarkFilter() returns ["
              << benchmark::GetBenchmarkFilter()
              << "] but the expected flag is [" << flag << "]\n";
    return 1;
  }
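
  // Passing a non-null spec to RunSpecifiedBenchmarks() overrides the
  // --benchmark_filter value; its return value is the number of benchmarks
  // that matched the spec and were run (here, only BM_Chosen).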
  TestReporter test_reporter;
  const char* const spec = "BM_Chosen";
  const size_t returned_count =
      benchmark::RunSpecifiedBenchmarks(&test_reporter, spec);
  assert(returned_count == 1);
  const std::vector<std::string> matched_functions =
      test_reporter.GetMatchedFunctions();
  assert(matched_functions.size() == 1);
  if (strcmp(spec, matched_functions.front().c_str()) != 0) {
    std::cerr << "Expected benchmark [" << spec << "] to run, but got ["
              << matched_functions.front() << "]\n";
    return 2;
  }

  // Test that SetBenchmarkFilter works.
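  // SetBenchmarkFilter() overwrites the filter value programmatically; here it
  // is verified only through what GetBenchmarkFilter() reports afterwards.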
  const std::string golden_value = "golden_value";
  benchmark::SetBenchmarkFilter(golden_value);
  std::string current_value = benchmark::GetBenchmarkFilter();
  if (golden_value != current_value) {
    std::cerr << "Expected [" << golden_value
              << "] for --benchmark_filter but got [" << current_value << "]\n";
    return 3;
  }

  return 0;
}