mirror of https://github.com/google/benchmark.git
Bind more State methods/attributes to Python (#1037)
* Bind Counter to Python
* Bind State methods to Python
* Bind state.counters to Python
* Import _benchmark.Counter
* Add Python example of state usage

Co-authored-by: Dominic Hamon <dominichamon@users.noreply.github.com>
This commit is contained in:
parent beb360d03e
commit 12e85b2eeb
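In short, this change exposes benchmark::Counter and additional benchmark::State methods (timing control, custom counters, processed-item accounting) to Python. A condensed sketch of what the new surface allows, modeled on the example added below; the function body and numbers here are illustrative only, not part of the diff:

import google_benchmark as benchmark


@benchmark.register
def bench_sort(state):
    while state:                  # state.__bool__ drives the measured loop
        state.pause_timing()      # newly bound: exclude setup from the timing
        data = list(range(1000, 0, -1))
        state.resume_timing()
        data.sort()               # only the sort is measured
    state.items_processed = state.iterations * 1000  # newly bound property


if __name__ == "__main__":
    benchmark.main()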
@@ -29,10 +29,12 @@ Example usage:
 from absl import app
 from google_benchmark import _benchmark
+from google_benchmark._benchmark import Counter

 __all__ = [
     "register",
     "main",
+    "Counter",
 ]

 __version__ = "0.1.0"

@@ -54,7 +56,7 @@ def _flags_parser(argv):

 def _run_benchmarks(argv):
     if len(argv) > 1:
-        raise app.UsageError('Too many command-line arguments.')
+        raise app.UsageError("Too many command-line arguments.")
     return _benchmark.RunSpecifiedBenchmarks()


@@ -1,8 +1,17 @@
 // Benchmark for Python.

 #include "benchmark/benchmark.h"

+#include <map>
+#include <string>
+#include <vector>
+
+#include "pybind11/operators.h"
 #include "pybind11/pybind11.h"
+#include "pybind11/stl.h"
+#include "pybind11/stl_bind.h"
+
+PYBIND11_MAKE_OPAQUE(benchmark::UserCounters);

 namespace {
 namespace py = ::pybind11;
@@ -29,9 +38,8 @@ std::vector<std::string> Initialize(const std::vector<std::string>& argv) {
 }

 void RegisterBenchmark(const char* name, py::function f) {
-  benchmark::RegisterBenchmark(name, [f](benchmark::State& state) {
-    f(&state);
-  });
+  benchmark::RegisterBenchmark(name,
+                               [f](benchmark::State& state) { f(&state); });
 }

 PYBIND11_MODULE(_benchmark, m) {
@@ -40,9 +48,61 @@ PYBIND11_MODULE(_benchmark, m) {
   m.def("RunSpecifiedBenchmarks",
         []() { benchmark::RunSpecifiedBenchmarks(); });

-  py::class_<benchmark::State>(m, "State")
-      .def("__bool__", &benchmark::State::KeepRunning)
-      .def_property_readonly("keep_running", &benchmark::State::KeepRunning)
-      .def("skip_with_error", &benchmark::State::SkipWithError);
+  using benchmark::Counter;
+  py::class_<Counter> py_counter(m, "Counter");
+
+  py::enum_<Counter::Flags>(py_counter, "Flags")
+      .value("kDefaults", Counter::Flags::kDefaults)
+      .value("kIsRate", Counter::Flags::kIsRate)
+      .value("kAvgThreads", Counter::Flags::kAvgThreads)
+      .value("kAvgThreadsRate", Counter::Flags::kAvgThreadsRate)
+      .value("kIsIterationInvariant", Counter::Flags::kIsIterationInvariant)
+      .value("kIsIterationInvariantRate",
+             Counter::Flags::kIsIterationInvariantRate)
+      .value("kAvgIterations", Counter::Flags::kAvgIterations)
+      .value("kAvgIterationsRate", Counter::Flags::kAvgIterationsRate)
+      .value("kInvert", Counter::Flags::kInvert)
+      .export_values()
+      .def(py::self | py::self);
+
+  py::enum_<Counter::OneK>(py_counter, "OneK")
+      .value("kIs1000", Counter::OneK::kIs1000)
+      .value("kIs1024", Counter::OneK::kIs1024)
+      .export_values();
+
+  py_counter
+      .def(py::init<double, Counter::Flags, Counter::OneK>(),
+           py::arg("value") = 0., py::arg("flags") = Counter::kDefaults,
+           py::arg("k") = Counter::kIs1000)
+      .def(py::init([](double value) { return Counter(value); }))
+      .def_readwrite("value", &Counter::value)
+      .def_readwrite("flags", &Counter::flags)
+      .def_readwrite("oneK", &Counter::oneK);
+  py::implicitly_convertible<py::float_, Counter>();
+  py::implicitly_convertible<py::int_, Counter>();
+
+  py::bind_map<benchmark::UserCounters>(m, "UserCounters");
+
+  using benchmark::State;
+  py::class_<State>(m, "State")
+      .def("__bool__", &State::KeepRunning)
+      .def_property_readonly("keep_running", &State::KeepRunning)
+      .def("pause_timing", &State::PauseTiming)
+      .def("resume_timing", &State::ResumeTiming)
+      .def("skip_with_error", &State::SkipWithError)
+      .def_property_readonly("error_occured", &State::error_occurred)
+      .def("set_iteration_time", &State::SetIterationTime)
+      .def_property("bytes_processed", &State::bytes_processed,
+                    &State::SetBytesProcessed)
+      .def_property("complexity_n", &State::complexity_length_n,
+                    &State::SetComplexityN)
+      .def_property("items_processed", &State::items_processed,
+                    &State::SetItemsProcessed)
+      .def("set_label", (void (State::*)(const char*)) & State::SetLabel)
+      .def("range", &State::range, py::arg("pos") = 0)
+      .def_property_readonly("iterations", &State::iterations)
+      .def_readwrite("counters", &State::counters)
+      .def_readonly("thread_index", &State::thread_index)
+      .def_readonly("threads", &State::threads);
 };
 }  // namespace
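Because benchmark::UserCounters is bound with py::bind_map and Counter is registered as implicitly convertible from Python ints and floats, state.counters can be used like an ordinary mutable mapping from Python, with plain numbers wrapped in a default Counter on assignment. A rough sketch of what that enables; the benchmark and counter names here are illustrative, not part of this diff:

import google_benchmark as benchmark
from google_benchmark import Counter


@benchmark.register
def counters_as_a_mapping(state):
    while state:
        pass  # measured work would go here
    state.counters["widgets"] = 42                      # int is implicitly converted to Counter
    state.counters["widgets"].flags = Counter.kIsRate   # stored Counter fields can be adjusted afterwards
    assert "widgets" in state.counters                  # dict-style membership via bind_map


if __name__ == "__main__":
    benchmark.main()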
@@ -20,7 +20,11 @@ In the extracted directory, execute:
 python setup.py install
 """

+import random
+import time
+
 import google_benchmark as benchmark
+from google_benchmark import Counter


 @benchmark.register
@@ -34,15 +38,59 @@ def sum_million(state):
     while state:
         sum(range(1_000_000))


+@benchmark.register
+def pause_timing(state):
+    """Pause timing every iteration."""
+    while state:
+        # Construct a list of random ints every iteration without timing it
+        state.pause_timing()
+        random_list = [random.randint(0, 100) for _ in range(100)]
+        state.resume_timing()
+        # Time the in-place sorting algorithm
+        random_list.sort()
+
+
 @benchmark.register
 def skipped(state):
     if True:  # Test some predicate here.
-        state.skip_with_error('some error')
+        state.skip_with_error("some error")
         return  # NOTE: You must explicitly return, or benchmark will continue.

-    # Benchmark code would be here.
+    ...  # Benchmark code would be here.


-if __name__ == '__main__':
+@benchmark.register
+def manual_timing(state):
+    while state:
+        # Manually count Python CPU time
+        start = time.perf_counter()  # perf_counter_ns() in Python 3.7+
+        # Something to benchmark
+        time.sleep(0.01)
+        end = time.perf_counter()
+        state.set_iteration_time(end - start)
+
+
+@benchmark.register
+def custom_counters(state):
+    """Collect custom metrics using benchmark.Counter."""
+    num_foo = 0.0
+    while state:
+        # Benchmark some code here
+        pass
+        # Collect some custom metric named foo
+        num_foo += 0.13
+
+    # Automatic Counter from numbers.
+    state.counters["foo"] = num_foo
+    # Set a counter as a rate.
+    state.counters["foo_rate"] = Counter(num_foo, Counter.kIsRate)
+    # Set a counter as an inverse of rate.
+    state.counters["foo_inv_rate"] = Counter(num_foo, Counter.kIsRate | Counter.kInvert)
+    # Set a counter as a thread-average quantity.
+    state.counters["foo_avg"] = Counter(num_foo, Counter.kAvgThreads)
+    # There's also a combined flag:
+    state.counters["foo_avg_rate"] = Counter(num_foo, Counter.kAvgThreadsRate)
+
+
+if __name__ == "__main__":
     benchmark.main()