The `ctest --test-load` option is implemented in `StartNextTests` by not starting any tests when the load is too high and instead sleeping and then returning. Prior to commit v3.11.0-rc1~117^2 (CTest: Re-implement test process handling using libuv, 2017-12-10) our outer loop in `RunTests` would immediately call `StartNextTests` again. However, now the `uv_run` loop may simply terminate if there are no tests running, because no events are left pending.

Fix this by converting the sleep in `StartNextTests` into a libuv timer that it starts instead. This avoids leaving `uv_run` with no pending events. In the case that other tests are already running, it also allows CTest to detect when they finish, even if that happens during the wait period where we previously slept.

This regression was not caught by the test suite because it only verified that we do not start new tests while the load is too high, not that we proceed to start tests once the load drops. Revise the test suite to cover both.

Fixes: #18338
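A minimal sketch of the pattern the commit describes, written against the libuv API directly rather than CTest's actual classes: the blocking sleep-and-return is replaced by a uv_timer_t whose callback retries the start step, so uv_run always has a pending handle while waiting. The helper names (StartNextTests, LoadIsTooHigh, OnRetryTimer) and the 100 ms retry delay are illustrative assumptions, not the real CTest code.

// Illustrative only: shows the sleep-to-timer conversion in isolation.
#include <uv.h>
#include <cstdio>

static int checks = 0;

// Stand-in for the real load check; pretends the load stays high twice.
static bool LoadIsTooHigh()
{
  return ++checks <= 2;
}

static void StartNextTests(uv_timer_t* timer);

static void OnRetryTimer(uv_timer_t* timer)
{
  // The timer fired inside uv_run, so the loop stayed alive; try again.
  StartNextTests(timer);
}

static void StartNextTests(uv_timer_t* timer)
{
  if (LoadIsTooHigh()) {
    std::printf("load too high, deferring new tests\n");
    // Instead of sleeping and returning (which could leave uv_run with no
    // pending events), arm a one-shot timer and let the loop call us back.
    uv_timer_start(timer, OnRetryTimer, 100 /*ms*/, 0 /*no repeat*/);
    return;
  }
  std::printf("load dropped, starting tests\n");
  uv_close(reinterpret_cast<uv_handle_t*>(timer), nullptr);
}

int main()
{
  uv_loop_t* loop = uv_default_loop();
  uv_timer_t retry;
  uv_timer_init(loop, &retry);
  StartNextTests(&retry);
  uv_run(loop, UV_RUN_DEFAULT);  // returns once the timer handle is closed
  return 0;
}

While real tests are running, such a timer coexists with their process handles, so the loop can react to a finished test as soon as its event arrives instead of only after a fixed sleep.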
164 lines | 5.8 KiB | CMake
include(RunCMake)
set(RunCMake_TEST_TIMEOUT 60)

unset(ENV{CTEST_PARALLEL_LEVEL})
unset(ENV{CTEST_OUTPUT_ON_FAILURE})

run_cmake_command(repeat-until-fail-bad1
  ${CMAKE_CTEST_COMMAND} --repeat-until-fail
  )
run_cmake_command(repeat-until-fail-bad2
  ${CMAKE_CTEST_COMMAND} --repeat-until-fail foo
  )
run_cmake_command(repeat-until-fail-good
  ${CMAKE_CTEST_COMMAND} --repeat-until-fail 2
  )

function(run_repeat_until_fail_tests)
  # Use a single build tree for a few tests without cleaning.
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/repeat-until-fail-build)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")

  run_cmake(repeat-until-fail-cmake)
  run_cmake_command(repeat-until-fail-ctest
    ${CMAKE_CTEST_COMMAND} -C Debug --repeat-until-fail 3
    )
endfunction()
run_repeat_until_fail_tests()

function(run_BadCTestTestfile)
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/BadCTestTestfile)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
  file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
  subdirs()
")

  run_cmake_command(BadCTestTestfile ${CMAKE_CTEST_COMMAND})
endfunction()
run_BadCTestTestfile()

function(run_MergeOutput)
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/MergeOutput)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
  file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
  add_test(MergeOutput \"${CMAKE_COMMAND}\" -P \"${RunCMake_SOURCE_DIR}/MergeOutput.cmake\")
")

  run_cmake_command(MergeOutput ${CMAKE_CTEST_COMMAND} -V)
endfunction()
run_MergeOutput()

function(run_LabelCount)
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/LabelCount)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
  file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
  add_test(test1 \"${CMAKE_COMMAND}\" -E echo \"test1\")
  set_tests_properties(test1 PROPERTIES LABELS 'bar')

  add_test(test2 \"${CMAKE_COMMAND}\" -E echo \"test2\")
  set_tests_properties(test2 PROPERTIES LABELS 'bar')

  add_test(test3 \"${CMAKE_COMMAND}\" -E echo \"test3\")
  set_tests_properties(test3 PROPERTIES LABELS 'foo')

  add_test(test4 \"${CMAKE_COMMAND}\" -E echo \"test4\")
  set_tests_properties(test4 PROPERTIES LABELS 'bar')
")

  run_cmake_command(LabelCount ${CMAKE_CTEST_COMMAND} -V)
endfunction()

run_LabelCount()

function(run_SerialFailed)
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/SerialFailed)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
  file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
  add_test(NoSuchCommand no_such_command)
  set_tests_properties(NoSuchCommand PROPERTIES RUN_SERIAL ON)
  add_test(Echo \"${CMAKE_COMMAND}\" -E echo \"EchoTest\")
")

  run_cmake_command(SerialFailed ${CMAKE_CTEST_COMMAND} -V)
endfunction()
run_SerialFailed()

function(run_TestLoad name load)
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/TestLoad)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
  file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
  add_test(TestLoad1 \"${CMAKE_COMMAND}\" -E echo \"test of --test-load\")
  add_test(TestLoad2 \"${CMAKE_COMMAND}\" -E echo \"test of --test-load\")
")
  run_cmake_command(${name} ${CMAKE_CTEST_COMMAND} -j2 --test-load ${load})
endfunction()

# Tests for the --test-load feature of ctest
#
# Spoof a load average value to make these tests more reliable.
set(ENV{__CTEST_FAKE_LOAD_AVERAGE_FOR_TESTING} 5)

# Verify that new tests are not started when the load average exceeds
# our threshold and that they then run once the load average drops.
run_TestLoad(test-load-wait 3)

# Verify that warning message is displayed but tests still start when
# an invalid argument is given.
run_TestLoad(test-load-invalid 'two')

# Verify that new tests are started when the load average falls below
# our threshold.
run_TestLoad(test-load-pass 10)

unset(ENV{__CTEST_FAKE_LOAD_AVERAGE_FOR_TESTING})

function(run_TestOutputSize)
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/TestOutputSize)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
  file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
  add_test(PassingTest \"${CMAKE_COMMAND}\" -E echo PassingTestOutput)
  add_test(FailingTest \"${CMAKE_COMMAND}\" -E no_such_command)
")
  run_cmake_command(TestOutputSize
    ${CMAKE_CTEST_COMMAND} -M Experimental -T Test
    --no-compress-output
    --test-output-size-passed 10
    --test-output-size-failed 12
    )
endfunction()
run_TestOutputSize()

function(run_TestAffinity)
  set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/TestAffinity)
  set(RunCMake_TEST_NO_CLEAN 1)
  file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
  file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
  # Create a test with affinity enabled. The default PROCESSORS
  # value is 1, so our expected output checks that this is the
  # number of processors in the mask.
  file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
  add_test(Affinity \"${TEST_AFFINITY}\")
  set_tests_properties(Affinity PROPERTIES PROCESSOR_AFFINITY ON)
")
  # Run ctest with a large parallel level so that the value is
  # not responsible for capping the number of processors available.
  run_cmake_command(TestAffinity ${CMAKE_CTEST_COMMAND} -V -j 64)
endfunction()
if(TEST_AFFINITY)
  run_TestAffinity()
endif()