mirror of https://github.com/google/benchmark.git
Added the functionality for a dry run benchmark called through the cli argument --benchmark_dry_run. (#1851)
* Added benchmark_dry_run boolean flag to command line options * Dry run logic to exit early and override iterations, repetitions, min time, min warmup time * Changed dry run override logic structure and added dry run to context --------- Co-authored-by: Shaan <shaanmistry03@gmail.com> Co-authored-by: Shaan Mistry <49106143+Shaan-Mistry@users.noreply.github.com>
This commit is contained in:
parent
08fdf6eb84
commit
72ecc4ea67
|
@ -92,6 +92,11 @@ BM_DEFINE_double(benchmark_min_warmup_time, 0.0);
|
||||||
// standard deviation of the runs will be reported.
|
// standard deviation of the runs will be reported.
|
||||||
BM_DEFINE_int32(benchmark_repetitions, 1);
|
BM_DEFINE_int32(benchmark_repetitions, 1);
|
||||||
|
|
||||||
|
// If enabled, forces each benchmark to execute exactly one iteration and one
|
||||||
|
// repetition, bypassing any configured
|
||||||
|
// MinTime()/MinWarmUpTime()/Iterations()/Repetitions()
|
||||||
|
BM_DEFINE_bool(benchmark_dry_run, false);
|
||||||
|
|
||||||
// If set, enable random interleaving of repetitions of all benchmarks.
|
// If set, enable random interleaving of repetitions of all benchmarks.
|
||||||
// See http://github.com/google/benchmark/issues/1051 for details.
|
// See http://github.com/google/benchmark/issues/1051 for details.
|
||||||
BM_DEFINE_bool(benchmark_enable_random_interleaving, false);
|
BM_DEFINE_bool(benchmark_enable_random_interleaving, false);
|
||||||
|
@ -717,6 +722,7 @@ void ParseCommandLineFlags(int* argc, char** argv) {
|
||||||
&FLAGS_benchmark_min_warmup_time) ||
|
&FLAGS_benchmark_min_warmup_time) ||
|
||||||
ParseInt32Flag(argv[i], "benchmark_repetitions",
|
ParseInt32Flag(argv[i], "benchmark_repetitions",
|
||||||
&FLAGS_benchmark_repetitions) ||
|
&FLAGS_benchmark_repetitions) ||
|
||||||
|
ParseBoolFlag(argv[i], "benchmark_dry_run", &FLAGS_benchmark_dry_run) ||
|
||||||
ParseBoolFlag(argv[i], "benchmark_enable_random_interleaving",
|
ParseBoolFlag(argv[i], "benchmark_enable_random_interleaving",
|
||||||
&FLAGS_benchmark_enable_random_interleaving) ||
|
&FLAGS_benchmark_enable_random_interleaving) ||
|
||||||
ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
|
ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
|
||||||
|
@ -755,6 +761,9 @@ void ParseCommandLineFlags(int* argc, char** argv) {
|
||||||
if (FLAGS_benchmark_color.empty()) {
|
if (FLAGS_benchmark_color.empty()) {
|
||||||
PrintUsageAndExit();
|
PrintUsageAndExit();
|
||||||
}
|
}
|
||||||
|
if (FLAGS_benchmark_dry_run) {
|
||||||
|
AddCustomContext("dry_run", "true");
|
||||||
|
}
|
||||||
for (const auto& kv : FLAGS_benchmark_context) {
|
for (const auto& kv : FLAGS_benchmark_context) {
|
||||||
AddCustomContext(kv.first, kv.second);
|
AddCustomContext(kv.first, kv.second);
|
||||||
}
|
}
|
||||||
|
@ -783,6 +792,7 @@ void PrintDefaultHelp() {
|
||||||
" [--benchmark_min_time=`<integer>x` OR `<float>s` ]\n"
|
" [--benchmark_min_time=`<integer>x` OR `<float>s` ]\n"
|
||||||
" [--benchmark_min_warmup_time=<min_warmup_time>]\n"
|
" [--benchmark_min_warmup_time=<min_warmup_time>]\n"
|
||||||
" [--benchmark_repetitions=<num_repetitions>]\n"
|
" [--benchmark_repetitions=<num_repetitions>]\n"
|
||||||
|
" [--benchmark_dry_run={true|false}]\n"
|
||||||
" [--benchmark_enable_random_interleaving={true|false}]\n"
|
" [--benchmark_enable_random_interleaving={true|false}]\n"
|
||||||
" [--benchmark_report_aggregates_only={true|false}]\n"
|
" [--benchmark_report_aggregates_only={true|false}]\n"
|
||||||
" [--benchmark_display_aggregates_only={true|false}]\n"
|
" [--benchmark_display_aggregates_only={true|false}]\n"
|
||||||
|
|
|
@ -58,6 +58,14 @@
|
||||||
|
|
||||||
namespace benchmark {
|
namespace benchmark {
|
||||||
|
|
||||||
|
BM_DECLARE_bool(benchmark_dry_run);
|
||||||
|
BM_DECLARE_string(benchmark_min_time);
|
||||||
|
BM_DECLARE_double(benchmark_min_warmup_time);
|
||||||
|
BM_DECLARE_int32(benchmark_repetitions);
|
||||||
|
BM_DECLARE_bool(benchmark_report_aggregates_only);
|
||||||
|
BM_DECLARE_bool(benchmark_display_aggregates_only);
|
||||||
|
BM_DECLARE_string(benchmark_perf_counters);
|
||||||
|
|
||||||
namespace internal {
|
namespace internal {
|
||||||
|
|
||||||
MemoryManager* memory_manager = nullptr;
|
MemoryManager* memory_manager = nullptr;
|
||||||
|
@ -228,20 +236,29 @@ BenchmarkRunner::BenchmarkRunner(
|
||||||
: b(b_),
|
: b(b_),
|
||||||
reports_for_family(reports_for_family_),
|
reports_for_family(reports_for_family_),
|
||||||
parsed_benchtime_flag(ParseBenchMinTime(FLAGS_benchmark_min_time)),
|
parsed_benchtime_flag(ParseBenchMinTime(FLAGS_benchmark_min_time)),
|
||||||
min_time(ComputeMinTime(b_, parsed_benchtime_flag)),
|
min_time(FLAGS_benchmark_dry_run
|
||||||
min_warmup_time((!IsZero(b.min_time()) && b.min_warmup_time() > 0.0)
|
? 0
|
||||||
|
: ComputeMinTime(b_, parsed_benchtime_flag)),
|
||||||
|
min_warmup_time(
|
||||||
|
FLAGS_benchmark_dry_run
|
||||||
|
? 0
|
||||||
|
: ((!IsZero(b.min_time()) && b.min_warmup_time() > 0.0)
|
||||||
? b.min_warmup_time()
|
? b.min_warmup_time()
|
||||||
: FLAGS_benchmark_min_warmup_time),
|
: FLAGS_benchmark_min_warmup_time)),
|
||||||
warmup_done(!(min_warmup_time > 0.0)),
|
warmup_done(FLAGS_benchmark_dry_run ? true : !(min_warmup_time > 0.0)),
|
||||||
repeats(b.repetitions() != 0 ? b.repetitions()
|
repeats(FLAGS_benchmark_dry_run
|
||||||
: FLAGS_benchmark_repetitions),
|
? 1
|
||||||
|
: (b.repetitions() != 0 ? b.repetitions()
|
||||||
|
: FLAGS_benchmark_repetitions)),
|
||||||
has_explicit_iteration_count(b.iterations() != 0 ||
|
has_explicit_iteration_count(b.iterations() != 0 ||
|
||||||
parsed_benchtime_flag.tag ==
|
parsed_benchtime_flag.tag ==
|
||||||
BenchTimeType::ITERS),
|
BenchTimeType::ITERS),
|
||||||
pool(static_cast<size_t>(b.threads() - 1)),
|
pool(static_cast<size_t>(b.threads() - 1)),
|
||||||
iters(has_explicit_iteration_count
|
iters(FLAGS_benchmark_dry_run
|
||||||
|
? 1
|
||||||
|
: (has_explicit_iteration_count
|
||||||
? ComputeIters(b_, parsed_benchtime_flag)
|
? ComputeIters(b_, parsed_benchtime_flag)
|
||||||
: 1),
|
: 1)),
|
||||||
perf_counters_measurement_ptr(pcm_) {
|
perf_counters_measurement_ptr(pcm_) {
|
||||||
run_results.display_report_aggregates_only =
|
run_results.display_report_aggregates_only =
|
||||||
(FLAGS_benchmark_report_aggregates_only ||
|
(FLAGS_benchmark_report_aggregates_only ||
|
||||||
|
@ -339,7 +356,7 @@ bool BenchmarkRunner::ShouldReportIterationResults(
|
||||||
// Determine if this run should be reported;
|
// Determine if this run should be reported;
|
||||||
// Either it has run for a sufficient amount of time
|
// Either it has run for a sufficient amount of time
|
||||||
// or because an error was reported.
|
// or because an error was reported.
|
||||||
return i.results.skipped_ ||
|
return i.results.skipped_ || FLAGS_benchmark_dry_run ||
|
||||||
i.iters >= kMaxIterations || // Too many iterations already.
|
i.iters >= kMaxIterations || // Too many iterations already.
|
||||||
i.seconds >=
|
i.seconds >=
|
||||||
GetMinTimeToApply() || // The elapsed time is large enough.
|
GetMinTimeToApply() || // The elapsed time is large enough.
|
||||||
|
|
|
@ -25,13 +25,6 @@
|
||||||
|
|
||||||
namespace benchmark {
|
namespace benchmark {
|
||||||
|
|
||||||
BM_DECLARE_string(benchmark_min_time);
|
|
||||||
BM_DECLARE_double(benchmark_min_warmup_time);
|
|
||||||
BM_DECLARE_int32(benchmark_repetitions);
|
|
||||||
BM_DECLARE_bool(benchmark_report_aggregates_only);
|
|
||||||
BM_DECLARE_bool(benchmark_display_aggregates_only);
|
|
||||||
BM_DECLARE_string(benchmark_perf_counters);
|
|
||||||
|
|
||||||
namespace internal {
|
namespace internal {
|
||||||
|
|
||||||
extern MemoryManager* memory_manager;
|
extern MemoryManager* memory_manager;
|
||||||
|
|
Loading…
Reference in New Issue