# Enable the tests
find_package(Threads REQUIRED)
include(CheckCXXCompilerFlag)
# NOTE: Some tests use `<cassert>` to perform the test. Therefore we must
# strip -DNDEBUG from the default CMake flags in non-Debug configurations.
string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
  add_definitions( -UNDEBUG )
  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
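  # Outside of Debug builds the library itself is still compiled with NDEBUG,
  # so its internal assertions stay disabled; the define above lets the tests
  # account for that.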
  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
  foreach (flags_var_to_scrub
      CMAKE_CXX_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_RELWITHDEBINFO
      CMAKE_CXX_FLAGS_MINSIZEREL
      CMAKE_C_FLAGS_RELEASE
      CMAKE_C_FLAGS_RELWITHDEBINFO
      CMAKE_C_FLAGS_MINSIZEREL)
    string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
            "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
  endforeach()
endif()
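# e.g. MSVC's default CMAKE_CXX_FLAGS_RELEASE of "/MD /O2 /Ob2 /DNDEBUG"
# becomes "/MD /O2 /Ob2 " after the scrub above.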
check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
set(BENCHMARK_O3_FLAG "")
if (BENCHMARK_HAS_O3_FLAG)
  set(BENCHMARK_O3_FLAG "-O3")
endif()
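# NOTE: BENCHMARK_O3_FLAG is not used directly in this file; it is picked up
# when building the assembly tests below (see AssemblyTests.cmake).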
# NOTE: These flags must be added after find_package(Threads REQUIRED),
# otherwise they will break the configuration check.
if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
endif()
add_library(output_test_helper STATIC output_test_helper.cc output_test.h)
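# output_test_helper provides the shared output-checking code (declared in
# output_test.h) that the compile_output_test targets below link against.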
macro(compile_benchmark_test name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_benchmark_test)
macro(compile_benchmark_test_with_main name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark_main)
endmacro(compile_benchmark_test_with_main)
macro(compile_output_test name)
  add_executable(${name} "${name}.cc" output_test.h)
  target_link_libraries(${name} output_test_helper benchmark
                        ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_output_test)
# Demonstration executable
compile_benchmark_test(benchmark_test)
add_test(benchmark benchmark_test --benchmark_min_time=0.01)
compile_benchmark_test(filter_test)
macro(add_filter_test name filter expect)
  add_test(${name} filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
  add_test(${name}_list_only filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
endmacro(add_filter_test)
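# In each call below, the final argument is the number of benchmarks the
# filter is expected to match; filter_test compares the count of benchmarks
# actually run (or listed) against it.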
add_filter_test(filter_simple "Foo" 3)
add_filter_test(filter_simple_negative "-Foo" 2)
add_filter_test(filter_suffix "BM_.*" 4)
add_filter_test(filter_suffix_negative "-BM_.*" 1)
add_filter_test(filter_regex_all ".*" 5)
add_filter_test(filter_regex_all_negative "-.*" 0)
add_filter_test(filter_regex_blank "" 5)
add_filter_test(filter_regex_blank_negative "-" 0)
add_filter_test(filter_regex_none "monkey" 0)
add_filter_test(filter_regex_none_negative "-monkey" 5)
add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
add_filter_test(filter_regex_begin "^BM_.*" 4)
add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
add_filter_test(filter_regex_begin2 "^N" 1)
add_filter_test(filter_regex_begin2_negative "-^N" 4)
add_filter_test(filter_regex_end ".*Ba$" 1)
add_filter_test(filter_regex_end_negative "-.*Ba$" 4)
compile_benchmark_test(options_test)
add_test(options_benchmarks options_test --benchmark_min_time=0.01)
compile_benchmark_test(basic_test)
add_test(basic_benchmark basic_test --benchmark_min_time=0.01)
compile_benchmark_test(diagnostics_test)
add_test(diagnostics_test diagnostics_test --benchmark_min_time=0.01)
compile_benchmark_test(skip_with_error_test)
add_test(skip_with_error_test skip_with_error_test --benchmark_min_time=0.01)
compile_benchmark_test(donotoptimize_test)
# Some of the issues with DoNotOptimize only occur when optimization is enabled
check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
if (BENCHMARK_HAS_O3_FLAG)
  set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
endif()
add_test(donotoptimize_test donotoptimize_test --benchmark_min_time=0.01)
compile_benchmark_test(fixture_test)
add_test(fixture_test fixture_test --benchmark_min_time=0.01)
compile_benchmark_test(register_benchmark_test)
add_test(register_benchmark_test register_benchmark_test --benchmark_min_time=0.01)
compile_benchmark_test(map_test)
add_test(map_test map_test --benchmark_min_time=0.01)
compile_benchmark_test(multiple_ranges_test)
add_test(multiple_ranges_test multiple_ranges_test --benchmark_min_time=0.01)
compile_benchmark_test_with_main(link_main_test)
add_test(link_main_test link_main_test --benchmark_min_time=0.01)
compile_output_test(reporter_output_test)
add_test(reporter_output_test reporter_output_test --benchmark_min_time=0.01)
compile_output_test(templated_fixture_test)
add_test(templated_fixture_test templated_fixture_test --benchmark_min_time=0.01)
compile_output_test(user_counters_test)
add_test(user_counters_test user_counters_test --benchmark_min_time=0.01)
compile_output_test(internal_threading_test)
add_test(internal_threading_test internal_threading_test --benchmark_min_time=0.01)
compile_output_test(report_aggregates_only_test)
add_test(report_aggregates_only_test report_aggregates_only_test --benchmark_min_time=0.01)
compile_output_test(display_aggregates_only_test)
add_test(display_aggregates_only_test display_aggregates_only_test --benchmark_min_time=0.01)
compile_output_test(user_counters_tabular_test)
add_test(user_counters_tabular_test user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)
compile_output_test(user_counters_thousands_test)
add_test(user_counters_thousands_test user_counters_thousands_test --benchmark_min_time=0.01)
compile_output_test(memory_manager_test)
add_test(memory_manager_test memory_manager_test --benchmark_min_time=0.01)
check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
if (BENCHMARK_HAS_CXX03_FLAG)
  compile_benchmark_test(cxx03_test)
  set_target_properties(cxx03_test
    PROPERTIES
    COMPILE_FLAGS "-std=c++03")
  # libstdc++ provides different definitions within <map> between dialects. When
  # LTO is enabled and -Werror is specified, GCC diagnoses this ODR violation,
  # causing the test to fail to compile. To prevent this we explicitly disable
  # the warning.
  check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
  if (BENCHMARK_ENABLE_LTO AND BENCHMARK_HAS_WNO_ODR)
    set_target_properties(cxx03_test
      PROPERTIES
      LINK_FLAGS "-Wno-odr")
  endif()
  add_test(cxx03 cxx03_test --benchmark_min_time=0.01)
endif()
# Attempt to work around flaky test failures when running on AppVeyor servers.
if (DEFINED ENV{APPVEYOR})
  set(COMPLEXITY_MIN_TIME "0.5")
else()
  set(COMPLEXITY_MIN_TIME "0.01")
endif()
compile_output_test(complexity_test)
add_test(complexity_benchmark complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})
###############################################################################
# GoogleTest Unit Tests
###############################################################################
if (BENCHMARK_ENABLE_GTEST_TESTS)
  macro(compile_gtest name)
    add_executable(${name} "${name}.cc")
    if (TARGET googletest)
      add_dependencies(${name} googletest)
    endif()
    if (GTEST_INCLUDE_DIRS)
      target_include_directories(${name} PRIVATE ${GTEST_INCLUDE_DIRS})
    endif()
    target_link_libraries(${name} benchmark
                          ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
  endmacro(compile_gtest)

  macro(add_gtest name)
    compile_gtest(${name})
    add_test(${name} ${name})
  endmacro()
  add_gtest(benchmark_gtest)
  add_gtest(benchmark_name_gtest)
  add_gtest(statistics_gtest)
  add_gtest(string_util_gtest)
endif(BENCHMARK_ENABLE_GTEST_TESTS)
###############################################################################
# Assembly Unit Tests
###############################################################################
if (BENCHMARK_ENABLE_ASSEMBLY_TESTS)
  if (NOT LLVM_FILECHECK_EXE)
    message(FATAL_ERROR "LLVM FileCheck is required when including this file")
  endif()
  include(AssemblyTests.cmake)
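  # Each add_filecheck_test below (defined in AssemblyTests.cmake) generates
  # assembly for <name>.cc and runs LLVM FileCheck to verify the // CHECK
  # lines in the source file against that output.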
  add_filecheck_test(donotoptimize_assembly_test)
  add_filecheck_test(state_assembly_test)
  add_filecheck_test(clobber_memory_assembly_test)
endif()
###############################################################################
# Code Coverage Configuration
###############################################################################
# Add the coverage command(s)
if(CMAKE_BUILD_TYPE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
endif()
if (${CMAKE_BUILD_TYPE_LOWER} MATCHES "coverage")
  find_program(GCOV gcov)
  find_program(LCOV lcov)
  find_program(GENHTML genhtml)
  find_program(CTEST ctest)
  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
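    # The command below zeroes the coverage counters (-z), records a baseline
    # capture (-i), runs the tests through ctest, captures again, merges the
    # two traces (-a), strips the test/ sources (-r), and renders an HTML
    # report with genhtml.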
    add_custom_command(
      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMAND ${LCOV} -q -z -d .
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
      COMMAND ${CTEST} --force-new-ctest-process
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
      COMMENT "Running LCOV"
    )
    add_custom_target(coverage
      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMENT "LCOV report at lcov/index.html"
    )
    message(STATUS "Coverage command added")
  else()
    if (HAVE_CXX_FLAG_COVERAGE)
      set(CXX_FLAG_COVERAGE_MESSAGE supported)
    else()
      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
    endif()
    message(WARNING
            "Coverage not available:\n"
            " gcov: ${GCOV}\n"
            " lcov: ${LCOV}\n"
            " genhtml: ${GENHTML}\n"
            " ctest: ${CTEST}\n"
            " --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
  endif()
endif()