mirror of https://github.com/google/benchmark.git
BENCHMARK_CAPTURE() and Complexity() - naming problem (#761)
Created a BenchmarkName class which holds the full benchmark name and allows specifying and retrieving the different components of the name (e.g. args, threads, etc.). Fixes #730.
parent df7c7ee1d3
commit f6e96861a3
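For orientation only, a minimal sketch (not part of the patch) of how the new benchmark::BenchmarkName introduced below is meant to be used: each component is a plain string, and str() joins every non-empty field with '/'. The field values here ("BM_Example", "width:16/64", etc.) are made-up examples; only the struct, its fields, and str() come from the patch.

#include <benchmark/benchmark.h>
#include <iostream>

int main() {
  benchmark::BenchmarkName name;
  name.function_name = "BM_Example";   // the registry fills this from the benchmark family name
  name.args = "width:16/64";           // argument components, formatted as "name:value" pieces
  name.min_time = "min_time:2.000";
  name.threads = "threads:4";
  // Empty components (iterations, repetitions, time_type here) are skipped by str(),
  // so no stray '/' separators appear in the final name.
  std::cout << name.str() << "\n";  // BM_Example/width:16/64/min_time:2.000/threads:4
  return 0;
}

Because empty components are simply skipped, callers such as ComputeBigO() further down can clear a single field (e.g. args) and still get a well-formed name.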
--- a/AUTHORS
+++ b/AUTHORS
@@ -13,6 +13,7 @@ Andriy Berestovskyy <berestovskyy@gmail.com>
 Arne Beer <arne@twobeer.de>
 Carto
 Christopher Seymour <chris.j.seymour@hotmail.com>
+Daniel Harvey <danielharvey458@gmail.com>
 David Coeurjolly <david.coeurjolly@liris.cnrs.fr>
 Deniz Evrenci <denizevrenci@gmail.com>
 Dirac Research
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -29,6 +29,7 @@ Billy Robert O'Neal III <billy.oneal@gmail.com> <bion@microsoft.com>
 Chris Kennelly <ckennelly@google.com> <ckennelly@ckennelly.com>
 Christopher Seymour <chris.j.seymour@hotmail.com>
 Cyrille Faucheux <cyrille.faucheux@gmail.com>
+Daniel Harvey <danielharvey458@gmail.com>
 David Coeurjolly <david.coeurjolly@liris.cnrs.fr>
 Deniz Evrenci <denizevrenci@gmail.com>
 Dominic Hamon <dma@stripysock.com> <dominic@google.com>
--- a/include/benchmark/benchmark.h
+++ b/include/benchmark/benchmark.h
@@ -1302,6 +1302,23 @@ struct SystemInfo {
   BENCHMARK_DISALLOW_COPY_AND_ASSIGN(SystemInfo);
 };
 
+// BenchmarkName contains the components of the Benchmark's name
+// which allows individual fields to be modified or cleared before
+// building the final name using 'str()'.
+struct BenchmarkName {
+  std::string function_name;
+  std::string args;
+  std::string min_time;
+  std::string iterations;
+  std::string repetitions;
+  std::string time_type;
+  std::string threads;
+
+  // Return the full name of the benchmark with each non-empty
+  // field separated by a '/'
+  std::string str() const;
+};
+
 // Interface for custom benchmark result printers.
 // By default, benchmark reports are printed to stdout. However an application
 // can control the destination of the reports by calling
@@ -1340,7 +1357,7 @@ class BenchmarkReporter {
           max_bytes_used(0) {}
 
     std::string benchmark_name() const;
-    std::string run_name;
+    BenchmarkName run_name;
     RunType run_type;  // is this a measurement, or an aggregate?
     std::string aggregate_name;
     std::string report_label;  // Empty if not set by benchmark.
--- a/src/benchmark.cc
+++ b/src/benchmark.cc
@@ -233,7 +233,7 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
   size_t stat_field_width = 0;
   for (const BenchmarkInstance& benchmark : benchmarks) {
     name_field_width =
-        std::max<size_t>(name_field_width, benchmark.name.size());
+        std::max<size_t>(name_field_width, benchmark.name.str().size());
     might_have_aggregates |= benchmark.repetitions > 1;
 
     for (const auto& Stat : *benchmark.statistics)
@@ -393,7 +393,8 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
   }
 
   if (FLAGS_benchmark_list_tests) {
-    for (auto const& benchmark : benchmarks) Out << benchmark.name << "\n";
+    for (auto const& benchmark : benchmarks)
+      Out << benchmark.name.str() << "\n";
   } else {
     internal::RunBenchmarks(benchmarks, display_reporter, file_reporter);
   }
--- a/src/benchmark_api_internal.h
+++ b/src/benchmark_api_internal.h
@@ -16,7 +16,7 @@ namespace internal {
 
 // Information kept per benchmark we may want to run
 struct BenchmarkInstance {
-  std::string name;
+  BenchmarkName name;
   Benchmark* benchmark;
   AggregationReportMode aggregation_report_mode;
  std::vector<int64_t> arg;
--- /dev/null
+++ b/src/benchmark_name.cc
@@ -0,0 +1,58 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <benchmark/benchmark.h>
+
+namespace benchmark {
+
+namespace {
+
+// Compute the total size of a pack of std::strings
+size_t size_impl() { return 0; }
+
+template <typename Head, typename... Tail>
+size_t size_impl(const Head& head, const Tail&... tail) {
+  return head.size() + size_impl(tail...);
+}
+
+// Join a pack of std::strings using a delimiter
+// TODO: use absl::StrJoin
+void join_impl(std::string&, char) {}
+
+template <typename Head, typename... Tail>
+void join_impl(std::string& s, const char delimiter, const Head& head,
+               const Tail&... tail) {
+  if (!s.empty() && !head.empty()) {
+    s += delimiter;
+  }
+
+  s += head;
+
+  join_impl(s, delimiter, tail...);
+}
+
+template <typename... Ts>
+std::string join(char delimiter, const Ts&... ts) {
+  std::string s;
+  s.reserve(sizeof...(Ts) + size_impl(ts...));
+  join_impl(s, delimiter, ts...);
+  return s;
+}
+}  // namespace
+
+std::string BenchmarkName::str() const {
+  return join('/', function_name, args, min_time, iterations, repetitions,
+              time_type, threads);
+}
+}  // namespace benchmark
--- a/src/benchmark_register.cc
+++ b/src/benchmark_register.cc
@@ -153,7 +153,7 @@ bool BenchmarkFamilies::FindBenchmarks(
     for (auto const& args : family->args_) {
       for (int num_threads : *thread_counts) {
         BenchmarkInstance instance;
-        instance.name = family->name_;
+        instance.name.function_name = family->name_;
         instance.benchmark = family.get();
         instance.aggregation_report_mode = family->aggregation_report_mode_;
         instance.arg = args;
@@ -172,45 +172,51 @@ bool BenchmarkFamilies::FindBenchmarks(
         // Add arguments to instance name
         size_t arg_i = 0;
         for (auto const& arg : args) {
-          instance.name += "/";
+          if (!instance.name.args.empty()) {
+            instance.name.args += '/';
+          }
+
           if (arg_i < family->arg_names_.size()) {
             const auto& arg_name = family->arg_names_[arg_i];
             if (!arg_name.empty()) {
-              instance.name +=
-                  StrFormat("%s:", family->arg_names_[arg_i].c_str());
+              instance.name.args += StrFormat("%s:", arg_name.c_str());
             }
           }
 
           // we know that the args are always non-negative (see 'AddRange()'),
           // thus print as 'unsigned'. BUT, do a cast due to the 32-bit builds.
-          instance.name += StrFormat("%lu", static_cast<unsigned long>(arg));
+          instance.name.args +=
+              StrFormat("%lu", static_cast<unsigned long>(arg));
 
           ++arg_i;
         }
 
         if (!IsZero(family->min_time_))
-          instance.name += StrFormat("/min_time:%0.3f", family->min_time_);
+          instance.name.min_time =
+              StrFormat("min_time:%0.3f", family->min_time_);
         if (family->iterations_ != 0) {
-          instance.name +=
-              StrFormat("/iterations:%lu",
+          instance.name.iterations =
+              StrFormat("iterations:%lu",
                         static_cast<unsigned long>(family->iterations_));
         }
         if (family->repetitions_ != 0)
-          instance.name += StrFormat("/repeats:%d", family->repetitions_);
+          instance.name.repetitions =
+              StrFormat("repeats:%d", family->repetitions_);
 
         if (family->use_manual_time_) {
-          instance.name += "/manual_time";
+          instance.name.time_type = "manual_time";
         } else if (family->use_real_time_) {
-          instance.name += "/real_time";
+          instance.name.time_type = "real_time";
         }
 
         // Add the number of threads used to the name
         if (!family->thread_counts_.empty()) {
-          instance.name += StrFormat("/threads:%d", instance.threads);
+          instance.name.threads = StrFormat("threads:%d", instance.threads);
         }
 
-        if ((re.Match(instance.name) && !isNegativeFilter) ||
-            (!re.Match(instance.name) && isNegativeFilter)) {
+        const auto full_name = instance.name.str();
+        if ((re.Match(full_name) && !isNegativeFilter) ||
+            (!re.Match(full_name) && isNegativeFilter)) {
           instance.last_benchmark_instance = (&args == &family->args_.back());
           benchmarks->push_back(std::move(instance));
         }
--- a/src/benchmark_runner.cc
+++ b/src/benchmark_runner.cc
@@ -191,7 +191,7 @@ class BenchmarkRunner {
     double seconds;
   };
   IterationResults DoNIterations() {
-    VLOG(2) << "Running " << b.name << " for " << iters << "\n";
+    VLOG(2) << "Running " << b.name.str() << " for " << iters << "\n";
 
     std::unique_ptr<internal::ThreadManager> manager;
     manager.reset(new internal::ThreadManager(b.threads));
--- a/src/complexity.cc
+++ b/src/complexity.cc
@@ -183,8 +183,9 @@ std::vector<BenchmarkReporter::Run> ComputeBigO(
     result_real = MinimalLeastSq(n, real_time, result_cpu.complexity);
   }
 
-  std::string run_name = reports[0].benchmark_name().substr(
-      0, reports[0].benchmark_name().find('/'));
+  // Drop the 'args' when reporting complexity.
+  auto run_name = reports[0].run_name;
+  run_name.args.clear();
 
   // Get the data from the accumulator to BenchmarkReporter::Run's.
   Run big_o;
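A hedged illustration (not part of the patch) of why this change fixes the BENCHMARK_CAPTURE() + Complexity() naming problem: the capture label lives in function_name, so clearing only args keeps it, whereas the old substr-at-first-'/' approach cut it off. The benchmark name comes from the complexity_test.cc case added at the end of this patch; the "1/3" args value is an assumed example of what Ranges({{1, 2}, {3, 4}}) can produce.

#include <benchmark/benchmark.h>
#include <cassert>

int main() {
  benchmark::BenchmarkName name;
  name.function_name = "BM_ComplexityCaptureArgs/capture_test";  // capture label is part of function_name
  name.args = "1/3";                                             // range arguments (assumed example)
  assert(name.str() == "BM_ComplexityCaptureArgs/capture_test/1/3");

  // What ComputeBigO() now does before emitting the BigO/RMS rows:
  name.args.clear();
  assert(name.str() == "BM_ComplexityCaptureArgs/capture_test");

  // The previous substr(0, find('/')) logic would have reported just
  // "BM_ComplexityCaptureArgs", dropping the captured-argument label.
  return 0;
}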
--- a/src/json_reporter.cc
+++ b/src/json_reporter.cc
@@ -168,7 +168,7 @@ void JSONReporter::PrintRunData(Run const& run) {
   std::string indent(6, ' ');
   std::ostream& out = GetOutputStream();
   out << indent << FormatKV("name", run.benchmark_name()) << ",\n";
-  out << indent << FormatKV("run_name", run.run_name) << ",\n";
+  out << indent << FormatKV("run_name", run.run_name.str()) << ",\n";
   out << indent << FormatKV("run_type", [&run]() -> const char* {
     switch (run.run_type) {
       case BenchmarkReporter::Run::RT_Iteration:
--- a/src/reporter.cc
+++ b/src/reporter.cc
@@ -83,7 +83,7 @@ BenchmarkReporter::Context::Context()
     : cpu_info(CPUInfo::Get()), sys_info(SystemInfo::Get()) {}
 
 std::string BenchmarkReporter::Run::benchmark_name() const {
-  std::string name = run_name;
+  std::string name = run_name.str();
   if (run_type == RT_Aggregate) {
     name += "_" + aggregate_name;
   }
--- a/src/statistics.cc
+++ b/src/statistics.cc
@@ -147,7 +147,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
   for (const auto& Stat : *reports[0].statistics) {
     // Get the data from the accumulator to BenchmarkReporter::Run's.
     Run data;
-    data.run_name = reports[0].benchmark_name();
+    data.run_name = reports[0].run_name;
     data.run_type = BenchmarkReporter::Run::RT_Aggregate;
     data.aggregate_name = Stat.name_;
     data.report_label = report_label;
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -191,6 +191,7 @@ if (BENCHMARK_ENABLE_GTEST_TESTS)
   endmacro()
 
   add_gtest(benchmark_gtest)
+  add_gtest(benchmark_name_gtest)
   add_gtest(statistics_gtest)
   add_gtest(string_util_gtest)
 endif(BENCHMARK_ENABLE_GTEST_TESTS)
--- /dev/null
+++ b/test/benchmark_name_gtest.cc
@@ -0,0 +1,74 @@
+#include "benchmark/benchmark.h"
+#include "gtest/gtest.h"
+
+namespace {
+
+using namespace benchmark;
+using namespace benchmark::internal;
+
+TEST(BenchmarkNameTest, Empty) {
+  const auto name = BenchmarkName();
+  EXPECT_EQ(name.str(), std::string());
+}
+
+TEST(BenchmarkNameTest, FunctionName) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  EXPECT_EQ(name.str(), "function_name");
+}
+
+TEST(BenchmarkNameTest, FunctionNameAndArgs) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.args = "some_args:3/4/5";
+  EXPECT_EQ(name.str(), "function_name/some_args:3/4/5");
+}
+
+TEST(BenchmarkNameTest, MinTime) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.args = "some_args:3/4";
+  name.min_time = "min_time:3.4s";
+  EXPECT_EQ(name.str(), "function_name/some_args:3/4/min_time:3.4s");
+}
+
+TEST(BenchmarkNameTest, Iterations) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.iterations = "iterations:42";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/iterations:42");
+}
+
+TEST(BenchmarkNameTest, Repetitions) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.repetitions = "repetitions:24";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/repetitions:24");
+}
+
+TEST(BenchmarkNameTest, TimeType) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.time_type = "hammer_time";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/hammer_time");
+}
+
+TEST(BenchmarkNameTest, Threads) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.threads = "threads:256";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/threads:256");
+}
+
+TEST(BenchmarkNameTest, TestEmptyFunctionName) {
+  auto name = BenchmarkName();
+  name.args = "first:3/second:4";
+  name.threads = "threads:22";
+  EXPECT_EQ(name.str(), "first:3/second:4/threads:22");
+}
+
+}  // end namespace
--- a/test/complexity_test.cc
+++ b/test/complexity_test.cc
@@ -176,6 +176,26 @@ ADD_COMPLEXITY_CASES(n_lg_n_test_name, big_o_n_lg_n_test_name,
 ADD_COMPLEXITY_CASES(n_lg_n_test_name, big_o_n_lg_n_test_name,
                      rms_o_n_lg_n_test_name, lambda_big_o_n_lg_n);
 
+// ========================================================================= //
+// -------- Testing formatting of Complexity with captured args ------------ //
+// ========================================================================= //
+
+void BM_ComplexityCaptureArgs(benchmark::State &state, int n) {
+  for (auto _ : state) {
+  }
+  state.SetComplexityN(n);
+}
+
+BENCHMARK_CAPTURE(BM_ComplexityCaptureArgs, capture_test, 100)
+    ->Complexity(benchmark::oN)
+    ->Ranges({{1, 2}, {3, 4}});
+
+const std::string complexity_capture_name =
+    "BM_ComplexityCaptureArgs/capture_test";
+
+ADD_COMPLEXITY_CASES(complexity_capture_name, complexity_capture_name + "_BigO",
+                     complexity_capture_name + "_RMS", "N");
+
 // ========================================================================= //
 // --------------------------- TEST CASES END ------------------------------ //
 // ========================================================================= //