mirror of https://github.com/google/benchmark.git
Support windows MSYS2 environments (#1704)
* [CI] Attempt to add windows MSYS2-based coverage

* Mark decl of `State::KeepRunningInternal()` as `inline`

  Maybe helps with

```
D:\a\_temp\msys64\ucrt64\bin\g++.exe -DHAVE_STD_REGEX -DHAVE_STEADY_CLOCK -DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS -ID:/a/benchmark/benchmark/include -Wall -Wextra -Wshadow -Wfloat-equal -Wold-style-cast -Werror -pedantic -pedantic-errors -fstrict-aliasing -Wno-deprecated-declarations -Wno-deprecated -Wstrict-aliasing -Wno-unused-variable -std=c++11 -fvisibility=hidden -fno-keep-inline-dllexport -UNDEBUG -MD -MT test/CMakeFiles/benchmark_test.dir/benchmark_test.cc.obj -MF test\CMakeFiles\benchmark_test.dir\benchmark_test.cc.obj.d -o test/CMakeFiles/benchmark_test.dir/benchmark_test.cc.obj -c D:/a/benchmark/benchmark/test/benchmark_test.cc
In file included from D:/a/benchmark/benchmark/test/benchmark_test.cc:1:
D:/a/benchmark/benchmark/include/benchmark/benchmark.h:1007:37: error: 'bool benchmark::State::KeepRunningInternal(benchmark::IterationCount, bool)' redeclared without dllimport attribute after being referenced with dll linkage [-Werror]
 1007 | inline BENCHMARK_ALWAYS_INLINE bool State::KeepRunningInternal(IterationCount n,
      |                                     ^~~~~
```

* Mark more `State`'s member function decls as `inline`

```
[27/110] Building CXX object test/CMakeFiles/spec_arg_verbosity_test.dir/spec_arg_verbosity_test.cc.obj
FAILED: test/CMakeFiles/spec_arg_verbosity_test.dir/spec_arg_verbosity_test.cc.obj
D:\a\_temp\msys64\clang32\bin\clang++.exe -DHAVE_STD_REGEX -DHAVE_STEADY_CLOCK -DHAVE_THREAD_SAFETY_ATTRIBUTES -DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS -ID:/a/benchmark/benchmark/include -Wall -Wextra -Wshadow -Wfloat-equal -Wold-style-cast -Werror -pedantic -pedantic-errors -Wshorten-64-to-32 -fstrict-aliasing -Wno-deprecated-declarations -Wno-deprecated -Wstrict-aliasing -Wthread-safety -Wno-unused-variable -std=c++11 -fvisibility=hidden -fvisibility-inlines-hidden -UNDEBUG -MD -MT test/CMakeFiles/spec_arg_verbosity_test.dir/spec_arg_verbosity_test.cc.obj -MF test\CMakeFiles\spec_arg_verbosity_test.dir\spec_arg_verbosity_test.cc.obj.d -o test/CMakeFiles/spec_arg_verbosity_test.dir/spec_arg_verbosity_test.cc.obj -c D:/a/benchmark/benchmark/test/spec_arg_verbosity_test.cc
In file included from D:/a/benchmark/benchmark/test/spec_arg_verbosity_test.cc:5:
D:/a/benchmark/benchmark/include/benchmark/benchmark.h:999:44: error: 'benchmark::State::KeepRunning' redeclared inline; 'dllimport' attribute ignored [-Werror,-Wignored-attributes]
  999 | inline BENCHMARK_ALWAYS_INLINE bool State::KeepRunning() {
      |                                            ^
D:/a/benchmark/benchmark/include/benchmark/benchmark.h:1003:44: error: 'benchmark::State::KeepRunningBatch' redeclared inline; 'dllimport' attribute ignored [-Werror,-Wignored-attributes]
 1003 | inline BENCHMARK_ALWAYS_INLINE bool State::KeepRunningBatch(IterationCount n) {
      |                                            ^
D:/a/benchmark/benchmark/include/benchmark/benchmark.h:1075:60: error: 'benchmark::State::begin' redeclared inline; 'dllimport' attribute ignored [-Werror,-Wignored-attributes]
 1075 | inline BENCHMARK_ALWAYS_INLINE State::StateIterator State::begin() {
      |                                                            ^
D:/a/benchmark/benchmark/include/benchmark/benchmark.h:1078:60: error: 'benchmark::State::end' redeclared inline; 'dllimport' attribute ignored [-Werror,-Wignored-attributes]
 1078 | inline BENCHMARK_ALWAYS_INLINE State::StateIterator State::end() {
      |                                                            ^
```

* StatisticsTest.CV: don't require precise FP match, tolerate some abs error

  We get ever so slightly different results on windows with GCC.

```
71: Test command: D:\a\benchmark\benchmark\_build\test\statistics_gtest.exe
71: Working Directory: D:/a/benchmark/benchmark/_build/test
71: Test timeout computed to be: 10000000
71: Running main() from gmock_main.cc
71: [==========] Running 4 tests from 1 test suite.
71: [----------] Global test environment set-up.
71: [----------] 4 tests from StatisticsTest
71: [ RUN      ] StatisticsTest.Mean
71: [       OK ] StatisticsTest.Mean (0 ms)
71: [ RUN      ] StatisticsTest.Median
71: [       OK ] StatisticsTest.Median (0 ms)
71: [ RUN      ] StatisticsTest.StdDev
71: [       OK ] StatisticsTest.StdDev (0 ms)
71: [ RUN      ] StatisticsTest.CV
71: D:/a/benchmark/benchmark/test/statistics_gtest.cc:31: Failure
71: Expected equality of these values:
71:   benchmark::StatisticsCV({2.5, 2.4, 3.3, 4.2, 5.1})
71:     Which is: 0.32888184094918088
71:   0.32888184094918121
71: [  FAILED  ] StatisticsTest.CV (0 ms)
71: [----------] 4 tests from StatisticsTest (0 ms total)
```

* Fix DLL path discovery for tests
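The two `inline` bullets above come down to a Windows DLL detail: when the library is consumed as a shared library, `State` carries `BENCHMARK_EXPORT` (which expands to `__declspec(dllimport)` on the consumer side), and MinGW GCC/Clang diagnose, under `-Werror`, a member that is later defined inline in the header but was not also declared `inline` inside the class. The sketch below is a minimal reproduction of that pattern, not benchmark's actual code; `MYLIB_API`, `MYLIB_SHARED`, and `MYLIB_BUILDING` are hypothetical stand-ins for the real export macros.

```cpp
// Minimal sketch (hypothetical names) of the dllimport/inline mismatch
// quoted in the commit message.
#if defined(_WIN32) && defined(MYLIB_SHARED)
  #if defined(MYLIB_BUILDING)
    #define MYLIB_API __declspec(dllexport)
  #else
    #define MYLIB_API __declspec(dllimport)
  #endif
#else
  #define MYLIB_API
#endif

class MYLIB_API State {
 public:
  // Without the leading `inline`, MinGW GCC reports "redeclared without
  // dllimport attribute after being referenced with dll linkage" and Clang
  // reports "'dllimport' attribute ignored" once the out-of-class inline
  // definition below is seen.
  inline bool KeepRunning();

 private:
  long count_ = 0;
  long max_iterations_ = 1000;
};

// Header-inline definition; the in-class `inline` above keeps the
// declaration and this definition consistent.
inline bool State::KeepRunning() { return count_++ < max_iterations_; }

int main() {
  State state;
  long iterations = 0;
  while (state.KeepRunning()) ++iterations;
  return iterations == 1000 ? 0 : 1;
}
```

Declaring the members `inline` at their first declaration, as the benchmark.h hunks below do, is the commit's fix for both diagnostics.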
Commit 1e96bb0ab5 (parent c8ef1ee99e)
GitHub Actions workflow (Windows jobs):

@@ -102,13 +102,60 @@ jobs:
       - name: build
         run: cmake --build _build/ --config ${{ matrix.build_type }}

+      - name: setup test environment
+        # Make sure gmock and benchmark DLLs can be found
+        run: >
+          echo "$((Get-Item .).FullName)/_build/bin/${{ matrix.build_type }}" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append;
+          echo "$((Get-Item .).FullName)/_build/src/${{ matrix.build_type }}" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append;
+
       - name: test
         run: ctest --test-dir _build/ -C ${{ matrix.build_type }} -VV

+  msys2:
+    name: ${{ matrix.os }}.${{ matrix.build_type }}.${{ matrix.lib }}.${{ matrix.msys2.msystem }}
+    runs-on: ${{ matrix.os }}
+    defaults:
+      run:
+        shell: msys2 {0}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ windows-latest ]
+        msys2:
+          - { msystem: MINGW64, arch: x86_64, family: GNU, compiler: g++ }
+          - { msystem: MINGW32, arch: i686, family: GNU, compiler: g++ }
+          - { msystem: CLANG64, arch: x86_64, family: LLVM, compiler: clang++ }
+          - { msystem: CLANG32, arch: i686, family: LLVM, compiler: clang++ }
+          - { msystem: UCRT64, arch: x86_64, family: GNU, compiler: g++ }
+        build_type:
+          - Debug
+          - Release
+        lib:
+          - shared
+          - static
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Install Base Dependencies
+        uses: msys2/setup-msys2@v2
+        with:
+          cache: false
+          msystem: ${{ matrix.msys2.msystem }}
+          update: true
+          install: >-
+            git
+            base-devel
+          pacboy: >-
+            cc:p
+            cmake:p
+            ninja:p
+
+      - name: configure cmake
+        env:
+          CXX: ${{ matrix.msys2.compiler }}
+        run: >
+          cmake -S . -B _build/
+          -GNinja
+          -DBENCHMARK_DOWNLOAD_DEPENDENCIES=ON
+          -DBUILD_SHARED_LIBS=${{ matrix.lib == 'shared' }}
+
+      - name: build
+        run: cmake --build _build/ --config ${{ matrix.build_type }}
+
+      - name: test
+        run: ctest --test-dir _build/ -C ${{ matrix.build_type }} -VV
include/benchmark/benchmark.h:

@@ -746,13 +746,13 @@ class BENCHMARK_EXPORT State {
   // have been called previously.
   //
   // NOTE: KeepRunning may not be used after calling either of these functions.
-  BENCHMARK_ALWAYS_INLINE StateIterator begin();
-  BENCHMARK_ALWAYS_INLINE StateIterator end();
+  inline BENCHMARK_ALWAYS_INLINE StateIterator begin();
+  inline BENCHMARK_ALWAYS_INLINE StateIterator end();

   // Returns true if the benchmark should continue through another iteration.
   // NOTE: A benchmark may not return from the test until KeepRunning() has
   // returned false.
-  bool KeepRunning();
+  inline bool KeepRunning();

   // Returns true iff the benchmark should run n more iterations.
   // REQUIRES: 'n' > 0.
@@ -764,7 +764,7 @@ class BENCHMARK_EXPORT State {
   //   while (state.KeepRunningBatch(1000)) {
   //     // process 1000 elements
   //   }
-  bool KeepRunningBatch(IterationCount n);
+  inline bool KeepRunningBatch(IterationCount n);

   // REQUIRES: timer is running and 'SkipWithMessage(...)' or
   // 'SkipWithError(...)' has not been called by the current thread.
@@ -982,7 +982,7 @@ class BENCHMARK_EXPORT State {
   void StartKeepRunning();
   // Implementation of KeepRunning() and KeepRunningBatch().
   // is_batch must be true unless n is 1.
-  bool KeepRunningInternal(IterationCount n, bool is_batch);
+  inline bool KeepRunningInternal(IterationCount n, bool is_batch);
   void FinishKeepRunning();

   const std::string name_;
test/CMakeLists.txt:

@@ -64,30 +64,38 @@ macro(compile_output_test name)
     ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
 endmacro(compile_output_test)

+macro(benchmark_add_test)
+  add_test(${ARGV})
+  if(WIN32 AND BUILD_SHARED_LIBS)
+    cmake_parse_arguments(TEST "" "NAME" "" ${ARGN})
+    set_tests_properties(${TEST_NAME} PROPERTIES ENVIRONMENT_MODIFICATION "PATH=path_list_prepend:$<TARGET_FILE_DIR:benchmark::benchmark>")
+  endif()
+endmacro(compile_output_test)
+
 # Demonstration executable
 compile_benchmark_test(benchmark_test)
-add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01s)

 compile_benchmark_test(spec_arg_test)
-add_test(NAME spec_arg COMMAND spec_arg_test --benchmark_filter=BM_NotChosen)
+benchmark_add_test(NAME spec_arg COMMAND spec_arg_test --benchmark_filter=BM_NotChosen)

 compile_benchmark_test(spec_arg_verbosity_test)
-add_test(NAME spec_arg_verbosity COMMAND spec_arg_verbosity_test --v=42)
+benchmark_add_test(NAME spec_arg_verbosity COMMAND spec_arg_verbosity_test --v=42)

 compile_benchmark_test(benchmark_setup_teardown_test)
-add_test(NAME benchmark_setup_teardown COMMAND benchmark_setup_teardown_test)
+benchmark_add_test(NAME benchmark_setup_teardown COMMAND benchmark_setup_teardown_test)

 compile_benchmark_test(filter_test)
 macro(add_filter_test name filter expect)
-  add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01s --benchmark_filter=${filter} ${expect})
-  add_test(NAME ${name}_list_only COMMAND filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
+  benchmark_add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01s --benchmark_filter=${filter} ${expect})
+  benchmark_add_test(NAME ${name}_list_only COMMAND filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
 endmacro(add_filter_test)

 compile_benchmark_test(benchmark_min_time_flag_time_test)
-add_test(NAME min_time_flag_time COMMAND benchmark_min_time_flag_time_test)
+benchmark_add_test(NAME min_time_flag_time COMMAND benchmark_min_time_flag_time_test)

 compile_benchmark_test(benchmark_min_time_flag_iters_test)
-add_test(NAME min_time_flag_iters COMMAND benchmark_min_time_flag_iters_test)
+benchmark_add_test(NAME min_time_flag_iters COMMAND benchmark_min_time_flag_iters_test)

 add_filter_test(filter_simple "Foo" 3)
 add_filter_test(filter_simple_negative "-Foo" 2)
@@ -109,19 +117,19 @@ add_filter_test(filter_regex_end ".*Ba$" 1)
 add_filter_test(filter_regex_end_negative "-.*Ba$" 4)

 compile_benchmark_test(options_test)
-add_test(NAME options_benchmarks COMMAND options_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME options_benchmarks COMMAND options_test --benchmark_min_time=0.01s)

 compile_benchmark_test(basic_test)
-add_test(NAME basic_benchmark COMMAND basic_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME basic_benchmark COMMAND basic_test --benchmark_min_time=0.01s)

 compile_output_test(repetitions_test)
-add_test(NAME repetitions_benchmark COMMAND repetitions_test --benchmark_min_time=0.01s --benchmark_repetitions=3)
+benchmark_add_test(NAME repetitions_benchmark COMMAND repetitions_test --benchmark_min_time=0.01s --benchmark_repetitions=3)

 compile_benchmark_test(diagnostics_test)
-add_test(NAME diagnostics_test COMMAND diagnostics_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME diagnostics_test COMMAND diagnostics_test --benchmark_min_time=0.01s)

 compile_benchmark_test(skip_with_error_test)
-add_test(NAME skip_with_error_test COMMAND skip_with_error_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME skip_with_error_test COMMAND skip_with_error_test --benchmark_min_time=0.01s)

 compile_benchmark_test(donotoptimize_test)
 # Enable errors for deprecated deprecations (DoNotOptimize(Tp const& value)).
@@ -134,55 +142,55 @@ check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
 if (BENCHMARK_HAS_O3_FLAG)
   set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
 endif()
-add_test(NAME donotoptimize_test COMMAND donotoptimize_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME donotoptimize_test COMMAND donotoptimize_test --benchmark_min_time=0.01s)

 compile_benchmark_test(fixture_test)
-add_test(NAME fixture_test COMMAND fixture_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME fixture_test COMMAND fixture_test --benchmark_min_time=0.01s)

 compile_benchmark_test(register_benchmark_test)
-add_test(NAME register_benchmark_test COMMAND register_benchmark_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME register_benchmark_test COMMAND register_benchmark_test --benchmark_min_time=0.01s)

 compile_benchmark_test(map_test)
-add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01s)

 compile_benchmark_test(multiple_ranges_test)
-add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01s)

 compile_benchmark_test(args_product_test)
-add_test(NAME args_product_test COMMAND args_product_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME args_product_test COMMAND args_product_test --benchmark_min_time=0.01s)

 compile_benchmark_test_with_main(link_main_test)
-add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01s)

 compile_output_test(reporter_output_test)
-add_test(NAME reporter_output_test COMMAND reporter_output_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME reporter_output_test COMMAND reporter_output_test --benchmark_min_time=0.01s)

 compile_output_test(templated_fixture_test)
-add_test(NAME templated_fixture_test COMMAND templated_fixture_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME templated_fixture_test COMMAND templated_fixture_test --benchmark_min_time=0.01s)

 compile_output_test(user_counters_test)
-add_test(NAME user_counters_test COMMAND user_counters_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME user_counters_test COMMAND user_counters_test --benchmark_min_time=0.01s)

 compile_output_test(perf_counters_test)
-add_test(NAME perf_counters_test COMMAND perf_counters_test --benchmark_min_time=0.01s --benchmark_perf_counters=CYCLES,INSTRUCTIONS)
+benchmark_add_test(NAME perf_counters_test COMMAND perf_counters_test --benchmark_min_time=0.01s --benchmark_perf_counters=CYCLES,INSTRUCTIONS)

 compile_output_test(internal_threading_test)
-add_test(NAME internal_threading_test COMMAND internal_threading_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME internal_threading_test COMMAND internal_threading_test --benchmark_min_time=0.01s)

 compile_output_test(report_aggregates_only_test)
-add_test(NAME report_aggregates_only_test COMMAND report_aggregates_only_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME report_aggregates_only_test COMMAND report_aggregates_only_test --benchmark_min_time=0.01s)

 compile_output_test(display_aggregates_only_test)
-add_test(NAME display_aggregates_only_test COMMAND display_aggregates_only_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME display_aggregates_only_test COMMAND display_aggregates_only_test --benchmark_min_time=0.01s)

 compile_output_test(user_counters_tabular_test)
-add_test(NAME user_counters_tabular_test COMMAND user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01s)
+benchmark_add_test(NAME user_counters_tabular_test COMMAND user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01s)

 compile_output_test(user_counters_thousands_test)
-add_test(NAME user_counters_thousands_test COMMAND user_counters_thousands_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME user_counters_thousands_test COMMAND user_counters_thousands_test --benchmark_min_time=0.01s)

 compile_output_test(memory_manager_test)
-add_test(NAME memory_manager_test COMMAND memory_manager_test --benchmark_min_time=0.01s)
+benchmark_add_test(NAME memory_manager_test COMMAND memory_manager_test --benchmark_min_time=0.01s)

 # MSVC does not allow to set the language standard to C++98/03.
 if(NOT (MSVC OR CMAKE_CXX_SIMULATE_ID STREQUAL "MSVC"))
@@ -207,7 +215,7 @@ if(NOT (MSVC OR CMAKE_CXX_SIMULATE_ID STREQUAL "MSVC"))
     set(DISABLE_LTO_WARNINGS "${DISABLE_LTO_WARNINGS} -Wno-lto-type-mismatch")
   endif()
   set_target_properties(cxx03_test PROPERTIES LINK_FLAGS "${DISABLE_LTO_WARNINGS}")
-  add_test(NAME cxx03 COMMAND cxx03_test --benchmark_min_time=0.01s)
+  benchmark_add_test(NAME cxx03 COMMAND cxx03_test --benchmark_min_time=0.01s)
 endif()

 # Attempt to work around flaky test failures when running on Appveyor servers.
@@ -217,7 +225,7 @@ else()
   set(COMPLEXITY_MIN_TIME "0.01s")
 endif()
 compile_output_test(complexity_test)
-add_test(NAME complexity_benchmark COMMAND complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})
+benchmark_add_test(NAME complexity_benchmark COMMAND complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})

 ###############################################################################
 # GoogleTest Unit Tests
@@ -232,7 +240,12 @@ if (BENCHMARK_ENABLE_GTEST_TESTS)

   macro(add_gtest name)
     compile_gtest(${name})
-    add_test(NAME ${name} COMMAND ${name})
+    benchmark_add_test(NAME ${name} COMMAND ${name})
+    if(WIN32 AND BUILD_SHARED_LIBS)
+      set_tests_properties(${name} PROPERTIES
+        ENVIRONMENT_MODIFICATION "PATH=path_list_prepend:$<TARGET_FILE_DIR:benchmark::benchmark>;PATH=path_list_prepend:$<TARGET_FILE_DIR:gmock_main>"
+      )
+    endif()
   endmacro()

   add_gtest(benchmark_gtest)
test/statistics_gtest.cc:

@@ -28,8 +28,8 @@ TEST(StatisticsTest, StdDev) {
 TEST(StatisticsTest, CV) {
   EXPECT_DOUBLE_EQ(benchmark::StatisticsCV({101, 101, 101, 101}), 0.0);
   EXPECT_DOUBLE_EQ(benchmark::StatisticsCV({1, 2, 3}), 1. / 2.);
-  EXPECT_DOUBLE_EQ(benchmark::StatisticsCV({2.5, 2.4, 3.3, 4.2, 5.1}),
-                   0.32888184094918121);
+  ASSERT_NEAR(benchmark::StatisticsCV({2.5, 2.4, 3.3, 4.2, 5.1}),
+              0.32888184094918121, 1e-15);
 }

 }  // end namespace
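The StatisticsTest.CV change above follows from how the coefficient of variation is computed: sample standard deviation divided by the mean. The last few bits of the `double` result depend on summation order, FMA contraction, and x87 vs. SSE code generation, which is the ~3e-16 discrepancy the MSYS2/GCC run hit. The sketch below is a standalone illustration, not benchmark's `StatisticsCV` implementation; it assumes (n - 1) normalization for the standard deviation, which reproduces the 0.32888184... value quoted in the test.

```cpp
// Standalone sketch of the quantity the test checks:
// coefficient of variation = sample stddev / mean.
#include <cassert>
#include <cmath>
#include <cstdio>
#include <vector>

double StatisticsCVSketch(const std::vector<double>& v) {
  const double n = static_cast<double>(v.size());
  double mean = 0.0;
  for (double x : v) mean += x;
  mean /= n;

  double sq_dev = 0.0;
  for (double x : v) sq_dev += (x - mean) * (x - mean);
  const double stddev = std::sqrt(sq_dev / (n - 1.0));  // sample stddev (n - 1)
  return stddev / mean;
}

int main() {
  const double cv = StatisticsCVSketch({2.5, 2.4, 3.3, 4.2, 5.1});
  // Bit-exact equality with 0.32888184094918121 can fail depending on the
  // compiler and platform; a small absolute tolerance, as the commit's
  // ASSERT_NEAR(..., 1e-15) does, absorbs that while still catching real bugs.
  assert(std::fabs(cv - 0.32888184094918121) < 1e-15);
  std::printf("CV = %.17g\n", cv);
  return 0;
}
```

An absolute tolerance of 1e-15 is roughly 20 ULPs at this magnitude: loose enough to absorb platform-specific rounding, tight enough to catch a real regression.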