From e4a6e97b91da8109840b59697a3b77f93ade956c Mon Sep 17 00:00:00 2001
From: Snappy Team
Date: Mon, 16 Nov 2020 15:10:23 +0000
Subject: [PATCH] Extend validate benchmarks over all types and also add a medley for validation.

I also made the compression happen only once per benchmark. This way we
get a cleaner measurement of branch misses using "perf stat". Compression
naturally suffers from a large number of branch misses, which was polluting
the measurements. This showed that with the new decompression the
branch-miss rate is actually much lower than initially reported: only 0.2%,
and very stable, i.e. it doesn't really fluctuate with how you execute the
benchmarks.

PiperOrigin-RevId: 342628576
---
 snappy_unittest.cc | 51 ++++++++++++++++++++++++++++++++-------------
 1 file changed, 36 insertions(+), 15 deletions(-)

diff --git a/snappy_unittest.cc b/snappy_unittest.cc
index d433545..0e207db 100644
--- a/snappy_unittest.cc
+++ b/snappy_unittest.cc
@@ -1284,27 +1284,33 @@ static void BM_UFlat(int iters, int arg) {
 }
 BENCHMARK(BM_UFlat)->DenseRange(0, ARRAYSIZE(files) - 1);
 
-static void BM_UFlatMedley(testing::benchmark::State& state) {
-  constexpr int kFiles = ARRAYSIZE(files);
+struct SourceFiles {
+  SourceFiles() {
+    for (int i = 0; i < kFiles; i++) {
+      std::string contents =
+          ReadTestDataFile(files[i].filename, files[i].size_limit);
+      max_size = std::max(max_size, contents.size());
+      sizes[i] = contents.size();
+      snappy::Compress(contents.data(), contents.size(), &zcontents[i]);
+    }
+  }
+  static constexpr int kFiles = ARRAYSIZE(files);
   std::string zcontents[kFiles];
   size_t sizes[kFiles];
   size_t max_size = 0;
-  for (int i = 0; i < kFiles; i++) {
-    std::string contents =
-        ReadTestDataFile(files[i].filename, files[i].size_limit);
-    max_size = std::max(max_size, contents.size());
-    sizes[i] = contents.size();
-    snappy::Compress(contents.data(), contents.size(), &zcontents[i]);
-  }
+};
 
-  std::vector<char> dst(max_size);
+static void BM_UFlatMedley(testing::benchmark::State& state) {
+  static const SourceFiles* const source = new SourceFiles();
+
+  std::vector<char> dst(source->max_size);
 
   size_t processed = 0;
   for (auto s : state) {
-    for (int i = 0; i < kFiles; i++) {
-      CHECK(snappy::RawUncompress(zcontents[i].data(), zcontents[i].size(),
-                                  dst.data()));
-      processed += sizes[i];
+    for (int i = 0; i < SourceFiles::kFiles; i++) {
+      CHECK(snappy::RawUncompress(source->zcontents[i].data(),
+                                  source->zcontents[i].size(), dst.data()));
+      processed += source->sizes[i];
     }
   }
   SetBenchmarkBytesProcessed(processed);
@@ -1332,7 +1338,22 @@ static void BM_UValidate(int iters, int arg) {
   }
   StopBenchmarkTiming();
 }
-BENCHMARK(BM_UValidate)->DenseRange(0, 4);
+BENCHMARK(BM_UValidate)->DenseRange(0, ARRAYSIZE(files) - 1);
+
+static void BM_UValidateMedley(testing::benchmark::State& state) {
+  static const SourceFiles* const source = new SourceFiles();
+
+  size_t processed = 0;
+  for (auto s : state) {
+    for (int i = 0; i < SourceFiles::kFiles; i++) {
+      CHECK(snappy::IsValidCompressedBuffer(source->zcontents[i].data(),
+                                            source->zcontents[i].size()));
+      processed += source->sizes[i];
+    }
+  }
+  SetBenchmarkBytesProcessed(processed);
+}
+BENCHMARK(BM_UValidateMedley);
 
 static void BM_UIOVec(int iters, int arg) {
   StopBenchmarkTiming();
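
For context, below is a minimal standalone sketch (not part of the commit; the placeholder payload and main() scaffolding are mine) of the public snappy calls the medley benchmarks exercise: Compress to set up the data once, IsValidCompressedBuffer for the validation path, and RawUncompress for the decompression path.

// Standalone sketch of the snappy calls used by BM_UFlatMedley and
// BM_UValidateMedley. The input here is a made-up placeholder, not the
// benchmark's test-data files.
#include <string>
#include <vector>

#include "snappy.h"

int main() {
  const std::string input(1 << 16, 'x');  // placeholder payload

  // Compress once up front, mirroring how the SourceFiles constructor
  // front-loads compression so it stays out of the timed loop.
  std::string zcontents;
  snappy::Compress(input.data(), input.size(), &zcontents);

  // BM_UValidateMedley's inner loop: validate without decompressing.
  if (!snappy::IsValidCompressedBuffer(zcontents.data(), zcontents.size()))
    return 1;

  // BM_UFlatMedley's inner loop: decompress into a preallocated buffer.
  size_t uncompressed_length = 0;
  if (!snappy::GetUncompressedLength(zcontents.data(), zcontents.size(),
                                     &uncompressed_length))
    return 1;
  std::vector<char> dst(uncompressed_length);
  if (!snappy::RawUncompress(zcontents.data(), zcontents.size(), dst.data()))
    return 1;
  return 0;
}

The branch-miss figures quoted in the commit message come from running the benchmark binary under perf stat, e.g. something like "perf stat -e branches,branch-misses ./snappy_unittest" (the exact binary name and invocation depend on the build); with compression done once in the SourceFiles constructor, its branch misses no longer pollute the measured loop.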