# Enable the tests

find_package(Threads REQUIRED)
include(CheckCXXCompilerFlag)

# NOTE: Some tests use `<cassert>` to perform the test. Therefore we must
# strip -DNDEBUG from the default CMake flags in DEBUG mode.
string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
  add_definitions( -UNDEBUG )
  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
  foreach (flags_var_to_scrub
      CMAKE_CXX_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_RELWITHDEBINFO
      CMAKE_CXX_FLAGS_MINSIZEREL
      CMAKE_C_FLAGS_RELEASE
      CMAKE_C_FLAGS_RELWITHDEBINFO
      CMAKE_C_FLAGS_MINSIZEREL)
    string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
            "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
  endforeach()
endif()
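# (Example, assuming a GCC/Clang toolchain: the default CMAKE_CXX_FLAGS_RELEASE
# of "-O3 -DNDEBUG" is scrubbed to "-O3 " by the loop above, so assert()-based
# checks in the tests stay active even in optimized builds.)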

check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
set(BENCHMARK_O3_FLAG "")
if (BENCHMARK_HAS_O3_FLAG)
  set(BENCHMARK_O3_FLAG "-O3")
endif()

# NOTE: These flags must be added after find_package(Threads REQUIRED) otherwise
# they will break the configuration check.
if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
endif()

add_library(output_test_helper STATIC output_test_helper.cc output_test.h)
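
# Helper macros used to declare the test binaries below:
#  * compile_benchmark_test(name)           -- builds <name> from <name>.cc and
#    links it against benchmark::benchmark and the system thread library.
#  * compile_benchmark_test_with_main(name) -- same, but links
#    benchmark::benchmark_main so the library provides main().
#  * compile_output_test(name)              -- additionally links
#    output_test_helper, for tests that verify reporter/console output.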
macro(compile_benchmark_test name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_benchmark_test)

macro(compile_benchmark_test_with_main name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark_main)
endmacro(compile_benchmark_test_with_main)

macro(compile_output_test name)
  add_executable(${name} "${name}.cc" output_test.h)
  target_link_libraries(${name} output_test_helper benchmark::benchmark
          ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_output_test)

# Demonstration executable
compile_benchmark_test(benchmark_test)
add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(spec_arg_test)
add_test(NAME spec_arg COMMAND spec_arg_test --benchmark_filter=BM_NotChosen)

compile_benchmark_test(filter_test)
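# add_filter_test runs filter_test twice with the given --benchmark_filter
# expression: once executing the benchmarks and once with
# --benchmark_list_tests. The trailing argument is the number of benchmarks
# the filter is expected to match, which the test binary checks.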
macro(add_filter_test name filter expect)
  add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
  add_test(NAME ${name}_list_only COMMAND filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
endmacro(add_filter_test)

add_filter_test(filter_simple "Foo" 3)
add_filter_test(filter_simple_negative "-Foo" 2)
add_filter_test(filter_suffix "BM_.*" 4)
add_filter_test(filter_suffix_negative "-BM_.*" 1)
add_filter_test(filter_regex_all ".*" 5)
add_filter_test(filter_regex_all_negative "-.*" 0)
add_filter_test(filter_regex_blank "" 5)
add_filter_test(filter_regex_blank_negative "-" 0)
add_filter_test(filter_regex_none "monkey" 0)
add_filter_test(filter_regex_none_negative "-monkey" 5)
add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
add_filter_test(filter_regex_begin "^BM_.*" 4)
add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
add_filter_test(filter_regex_begin2 "^N" 1)
add_filter_test(filter_regex_begin2_negative "-^N" 4)
add_filter_test(filter_regex_end ".*Ba$" 1)
add_filter_test(filter_regex_end_negative "-.*Ba$" 4)

compile_benchmark_test(options_test)
add_test(NAME options_benchmarks COMMAND options_test --benchmark_min_time=0.01)

compile_benchmark_test(basic_test)
add_test(NAME basic_benchmark COMMAND basic_test --benchmark_min_time=0.01)

compile_output_test(repetitions_test)
add_test(NAME repetitions_benchmark COMMAND repetitions_test --benchmark_min_time=0.01 --benchmark_repetitions=3)

compile_benchmark_test(diagnostics_test)
add_test(NAME diagnostics_test COMMAND diagnostics_test --benchmark_min_time=0.01)

compile_benchmark_test(skip_with_error_test)
add_test(NAME skip_with_error_test COMMAND skip_with_error_test --benchmark_min_time=0.01)

compile_benchmark_test(donotoptimize_test)
# Some of the issues with DoNotOptimize only occur when optimization is enabled
check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
if (BENCHMARK_HAS_O3_FLAG)
  set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
endif()
add_test(NAME donotoptimize_test COMMAND donotoptimize_test --benchmark_min_time=0.01)

compile_benchmark_test(fixture_test)
add_test(NAME fixture_test COMMAND fixture_test --benchmark_min_time=0.01)

compile_benchmark_test(register_benchmark_test)
add_test(NAME register_benchmark_test COMMAND register_benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(map_test)
add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01)

compile_benchmark_test(multiple_ranges_test)
add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01)

compile_benchmark_test(args_product_test)
add_test(NAME args_product_test COMMAND args_product_test --benchmark_min_time=0.01)

compile_benchmark_test_with_main(link_main_test)
add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01)

compile_output_test(reporter_output_test)
add_test(NAME reporter_output_test COMMAND reporter_output_test --benchmark_min_time=0.01)

compile_output_test(templated_fixture_test)
add_test(NAME templated_fixture_test COMMAND templated_fixture_test --benchmark_min_time=0.01)

compile_output_test(user_counters_test)
add_test(NAME user_counters_test COMMAND user_counters_test --benchmark_min_time=0.01)

compile_output_test(perf_counters_test)
add_test(NAME perf_counters_test COMMAND perf_counters_test --benchmark_min_time=0.01 --benchmark_perf_counters=CYCLES,BRANCHES)

compile_output_test(internal_threading_test)
add_test(NAME internal_threading_test COMMAND internal_threading_test --benchmark_min_time=0.01)

compile_output_test(report_aggregates_only_test)
add_test(NAME report_aggregates_only_test COMMAND report_aggregates_only_test --benchmark_min_time=0.01)

compile_output_test(display_aggregates_only_test)
add_test(NAME display_aggregates_only_test COMMAND display_aggregates_only_test --benchmark_min_time=0.01)

compile_output_test(user_counters_tabular_test)
add_test(NAME user_counters_tabular_test COMMAND user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)

compile_output_test(user_counters_thousands_test)
add_test(NAME user_counters_thousands_test COMMAND user_counters_thousands_test --benchmark_min_time=0.01)

compile_output_test(memory_manager_test)
add_test(NAME memory_manager_test COMMAND memory_manager_test --benchmark_min_time=0.01)

check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
if (BENCHMARK_HAS_CXX03_FLAG)
  compile_benchmark_test(cxx03_test)
  set_target_properties(cxx03_test
      PROPERTIES
      CXX_STANDARD 98
      CXX_STANDARD_REQUIRED YES)
  # libstdc++ provides different definitions within <map> between dialects. When
  # LTO is enabled and -Werror is specified GCC diagnoses this ODR violation
  # causing the test to fail to compile. To prevent this we explicitly disable
  # the warning.
  check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
  if (BENCHMARK_ENABLE_LTO AND BENCHMARK_HAS_WNO_ODR)
    set_target_properties(cxx03_test
        PROPERTIES
        LINK_FLAGS "-Wno-odr")
  endif()
  add_test(NAME cxx03 COMMAND cxx03_test --benchmark_min_time=0.01)
endif()

# Attempt to work around flaky test failures when running on Appveyor servers.
if (DEFINED ENV{APPVEYOR})
  set(COMPLEXITY_MIN_TIME "0.5")
else()
  set(COMPLEXITY_MIN_TIME "0.01")
endif()

compile_output_test(complexity_test)
add_test(NAME complexity_benchmark COMMAND complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})

###############################################################################
# GoogleTest Unit Tests
###############################################################################
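# These are plain GoogleTest/GoogleMock unit tests of library internals; they
# link gmock_main rather than the benchmark-provided main() and are only built
# when BENCHMARK_ENABLE_GTEST_TESTS is ON.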

if (BENCHMARK_ENABLE_GTEST_TESTS)
  macro(compile_gtest name)
    add_executable(${name} "${name}.cc")
    target_link_libraries(${name} benchmark::benchmark
        gmock_main ${CMAKE_THREAD_LIBS_INIT})
  endmacro(compile_gtest)

  macro(add_gtest name)
    compile_gtest(${name})
    add_test(NAME ${name} COMMAND ${name})
  endmacro()

  add_gtest(benchmark_gtest)
  add_gtest(benchmark_name_gtest)
  add_gtest(benchmark_random_interleaving_gtest)
  add_gtest(commandlineflags_gtest)
  add_gtest(statistics_gtest)
  add_gtest(string_util_gtest)
  add_gtest(perf_counters_gtest)
endif(BENCHMARK_ENABLE_GTEST_TESTS)

###############################################################################
# Assembly Unit Tests
###############################################################################
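# NOTE (from the change that introduced these tests): each test's assembly is
# generated at build time and LLVM FileCheck verifies that the // CHECK lines
# in the source appear in that output, mirroring how LLVM validates compiler
# output. The tests are only expected to run on 64-bit x86 under GCC and Clang,
# and only when FileCheck is found on the system.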

if (BENCHMARK_ENABLE_ASSEMBLY_TESTS)
  if (NOT LLVM_FILECHECK_EXE)
    message(FATAL_ERROR "LLVM FileCheck is required when including this file")
  endif()
  include(AssemblyTests.cmake)
  add_filecheck_test(donotoptimize_assembly_test)
  add_filecheck_test(state_assembly_test)
  add_filecheck_test(clobber_memory_assembly_test)
endif()

###############################################################################
# Code Coverage Configuration
###############################################################################

# Add the coverage command(s)
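# With a "Coverage" build type and the required tools available, building the
# `coverage` target below runs ctest under lcov and writes an HTML report to
# ${CMAKE_BINARY_DIR}/lcov/index.html.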
if(CMAKE_BUILD_TYPE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
endif()
if (${CMAKE_BUILD_TYPE_LOWER} MATCHES "coverage")
  find_program(GCOV gcov)
  find_program(LCOV lcov)
  find_program(GENHTML genhtml)
  find_program(CTEST ctest)
  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
    add_custom_command(
      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMAND ${LCOV} -q -z -d .
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
      COMMAND ${CTEST} --force-new-ctest-process
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
      COMMENT "Running LCOV"
    )
    add_custom_target(coverage
      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMENT "LCOV report at lcov/index.html"
    )
    message(STATUS "Coverage command added")
  else()
    if (HAVE_CXX_FLAG_COVERAGE)
      set(CXX_FLAG_COVERAGE_MESSAGE supported)
    else()
      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
    endif()
    message(WARNING
      "Coverage not available:\n"
      " gcov: ${GCOV}\n"
      " lcov: ${LCOV}\n"
      " genhtml: ${GENHTML}\n"
      " ctest: ${CTEST}\n"
      " --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
  endif()
endif()