mirror of
https://github.com/Kitware/CMake.git
synced 2026-04-20 05:12:07 -05:00
Merge topic 'ctest_test-ignore-skipped-tests'
851b6c15 cmCTestTestHandler: indicate why a test did not run
25a7f14f Help: add release notes
ab8bbef9 cmCTestTestHandler: count skipped tests as disabled
202a44a4 cmCTestRunTest: do not count skipped tests as failed

Acked-by: Kitware Robot <kwrobot@kitware.com>
Reviewed-by: Craig Scott <craig.scott@crascit.com>
Merge-request: !741
This commit is contained in:
7
Help/release/dev/ctest_test-ignore-skipped-tests.rst
Normal file
7
Help/release/dev/ctest_test-ignore-skipped-tests.rst
Normal file
@@ -0,0 +1,7 @@
|
||||
ctest_test-ignore-skipped-tests
|
||||
-------------------------------
|
||||
|
||||
* When running tests, CTest learned to treat skipped tests (using the
|
||||
:prop_test:`SKIP_RETURN_CODE` property) the same as tests with the
|
||||
:prop_test:`DISABLED` property. Due to this change, CTest will not indicate
|
||||
failure when all tests are either skipped or pass.
|
||||
@@ -167,6 +167,7 @@ bool cmCTestRunTest::EndTest(size_t completed, size_t total, bool started)
|
||||
std::vector<std::pair<cmsys::RegularExpression, std::string> >::iterator
|
||||
passIt;
|
||||
bool forceFail = false;
|
||||
bool skipped = false;
|
||||
bool outputTestErrorsToConsole = false;
|
||||
if (!this->TestProperties->RequiredRegularExpressions.empty() &&
|
||||
this->FailedDependencies.empty()) {
|
||||
@@ -219,6 +220,7 @@ bool cmCTestRunTest::EndTest(size_t completed, size_t total, bool started)
|
||||
s << "SKIP_RETURN_CODE=" << this->TestProperties->SkipReturnCode;
|
||||
this->TestResult.CompletionStatus = s.str();
|
||||
cmCTestLog(this->CTest, HANDLER_OUTPUT, "***Skipped ");
|
||||
skipped = true;
|
||||
} else if ((success && !this->TestProperties->WillFail) ||
|
||||
(!success && this->TestProperties->WillFail)) {
|
||||
this->TestResult.Status = cmCTestTestHandler::COMPLETED;
|
||||
@@ -338,7 +340,9 @@ bool cmCTestRunTest::EndTest(size_t completed, size_t total, bool started)
|
||||
compress ? this->CompressedOutput : this->ProcessOutput;
|
||||
this->TestResult.CompressOutput = compress;
|
||||
this->TestResult.ReturnValue = this->TestProcess->GetExitValue();
|
||||
this->TestResult.CompletionStatus = "Completed";
|
||||
if (!skipped) {
|
||||
this->TestResult.CompletionStatus = "Completed";
|
||||
}
|
||||
this->TestResult.ExecutionTime = this->TestProcess->GetTotalTime();
|
||||
this->MemCheckPostProcess();
|
||||
this->ComputeWeightedCost();
|
||||
@@ -349,7 +353,7 @@ bool cmCTestRunTest::EndTest(size_t completed, size_t total, bool started)
|
||||
this->TestHandler->TestResults.push_back(this->TestResult);
|
||||
}
|
||||
delete this->TestProcess;
|
||||
return passed;
|
||||
return passed || skipped;
|
||||
}
|
||||
|
||||
bool cmCTestRunTest::StartAgain()
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
#include <string.h>
|
||||
#include <time.h>
|
||||
|
||||
#include "cmAlgorithms.h"
|
||||
#include "cmCTest.h"
|
||||
#include "cmCTestBatchTestHandler.h"
|
||||
#include "cmCTestMultiProcessHandler.h"
|
||||
@@ -495,7 +496,8 @@ int cmCTestTestHandler::ProcessHandler()
|
||||
|
||||
for (SetOfTests::iterator ftit = resultsSet.begin();
|
||||
ftit != resultsSet.end(); ++ftit) {
|
||||
if (ftit->CompletionStatus == "Disabled") {
|
||||
if (cmHasLiteralPrefix(ftit->CompletionStatus, "SKIP_RETURN_CODE=") ||
|
||||
ftit->CompletionStatus == "Disabled") {
|
||||
disabledTests.push_back(*ftit);
|
||||
}
|
||||
}
|
||||
@@ -521,17 +523,22 @@ int cmCTestTestHandler::ProcessHandler()
|
||||
if (!disabledTests.empty()) {
|
||||
cmGeneratedFileStream ofs;
|
||||
cmCTestLog(this->CTest, HANDLER_OUTPUT, std::endl
|
||||
<< "The following tests are disabled and did not run:"
|
||||
<< std::endl);
|
||||
<< "The following tests did not run:" << std::endl);
|
||||
this->StartLogFile("TestsDisabled", ofs);
|
||||
|
||||
const char* disabled_reason;
|
||||
for (std::vector<cmCTestTestHandler::cmCTestTestResult>::iterator dtit =
|
||||
disabledTests.begin();
|
||||
dtit != disabledTests.end(); ++dtit) {
|
||||
ofs << dtit->TestCount << ":" << dtit->Name << std::endl;
|
||||
if (dtit->CompletionStatus == "Disabled") {
|
||||
disabled_reason = "Disabled";
|
||||
} else {
|
||||
disabled_reason = "Skipped";
|
||||
}
|
||||
cmCTestLog(this->CTest, HANDLER_OUTPUT, "\t"
|
||||
<< std::setw(3) << dtit->TestCount << " - " << dtit->Name
|
||||
<< std::endl);
|
||||
<< " (" << disabled_reason << ")" << std::endl);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -544,6 +551,7 @@ int cmCTestTestHandler::ProcessHandler()
|
||||
for (SetOfTests::iterator ftit = resultsSet.begin();
|
||||
ftit != resultsSet.end(); ++ftit) {
|
||||
if (ftit->Status != cmCTestTestHandler::COMPLETED &&
|
||||
!cmHasLiteralPrefix(ftit->CompletionStatus, "SKIP_RETURN_CODE=") &&
|
||||
ftit->CompletionStatus != "Disabled") {
|
||||
ofs << ftit->TestCount << ":" << ftit->Name << std::endl;
|
||||
cmCTestLog(
|
||||
|
||||
@@ -199,6 +199,7 @@ add_RunCMake_test(ctest_start)
|
||||
add_RunCMake_test(ctest_submit)
|
||||
add_RunCMake_test(ctest_test)
|
||||
add_RunCMake_test(ctest_disabled_test)
|
||||
add_RunCMake_test(ctest_skipped_test)
|
||||
add_RunCMake_test(ctest_upload)
|
||||
add_RunCMake_test(ctest_fixtures)
|
||||
add_RunCMake_test(file)
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests are disabled and did not run:
|
||||
.*2 \- CleanupTest
|
||||
The following tests did not run:
|
||||
.*2 \- CleanupTest \(Disabled\)
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests are disabled and did not run:
|
||||
.*3 \- DisabledFailingTest
|
||||
The following tests did not run:
|
||||
.*3 \- DisabledFailingTest \(Disabled\)
|
||||
+
|
||||
The following tests FAILED:
|
||||
.*2 \- FailingTest \(Failed\)
|
||||
|
||||
@@ -10,8 +10,8 @@
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests are disabled and did not run:
|
||||
.*2 \- DisabledTest
|
||||
The following tests did not run:
|
||||
.*2 \- DisabledTest \(Disabled\)
|
||||
+
|
||||
The following tests FAILED:
|
||||
.*3 - NotRunTest \(Not Run\)
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests are disabled and did not run:
|
||||
.*2 \- DisabledTest
|
||||
The following tests did not run:
|
||||
.*2 \- DisabledTest \(Disabled\)
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests are disabled and did not run:
|
||||
.*2 \- DisabledTest
|
||||
The following tests did not run:
|
||||
.*2 \- DisabledTest \(Disabled\)
|
||||
|
||||
@@ -10,8 +10,8 @@
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests are disabled and did not run:
|
||||
.*2 \- DisabledTest
|
||||
The following tests did not run:
|
||||
.*2 \- DisabledTest \(Disabled\)
|
||||
+
|
||||
The following tests FAILED:
|
||||
.*3 - NotRunTest \(Not Run\)
|
||||
|
||||
12
Tests/RunCMake/ctest_skipped_test/CMakeLists.txt.in
Normal file
12
Tests/RunCMake/ctest_skipped_test/CMakeLists.txt.in
Normal file
@@ -0,0 +1,12 @@
|
||||
cmake_minimum_required(VERSION 3.8)
|
||||
project(@CASE_NAME@ C)
|
||||
include(CTest)
|
||||
|
||||
if (WIN32)
|
||||
set(skip_command "@CMAKE_CURRENT_LIST_DIR@/skip.bat")
|
||||
else ()
|
||||
set(skip_command "@CMAKE_CURRENT_LIST_DIR@/skip.sh")
|
||||
endif ()
|
||||
|
||||
add_test(NAME SuccessfulTest COMMAND "${CMAKE_COMMAND}" --version)
|
||||
@CASE_CMAKELISTS_SUFFIX_CODE@
|
||||
1
Tests/RunCMake/ctest_skipped_test/CTestConfig.cmake.in
Normal file
1
Tests/RunCMake/ctest_skipped_test/CTestConfig.cmake.in
Normal file
@@ -0,0 +1 @@
|
||||
set(CTEST_PROJECT_NAME "@CASE_NAME@")
|
||||
51
Tests/RunCMake/ctest_skipped_test/RunCMakeTest.cmake
Normal file
51
Tests/RunCMake/ctest_skipped_test/RunCMakeTest.cmake
Normal file
@@ -0,0 +1,51 @@
|
||||
include(RunCTest)
|
||||
|
||||
function(run_SkipTest)
|
||||
set(CASE_CMAKELISTS_SUFFIX_CODE [[
|
||||
add_test(NAME SkipTest COMMAND ${skip_command})
|
||||
|
||||
set_tests_properties(SkipTest PROPERTIES SKIP_RETURN_CODE 125)
|
||||
]])
|
||||
run_ctest(SkipTest)
|
||||
endfunction()
|
||||
run_SkipTest()
|
||||
|
||||
function(run_SkipSetupTest)
|
||||
set(CASE_CMAKELISTS_SUFFIX_CODE [[
|
||||
add_test(NAME SkipTest COMMAND ${skip_command})
|
||||
add_test(NAME SuccessfulCleanupTest COMMAND "${CMAKE_COMMAND}" --version)
|
||||
|
||||
set_tests_properties(SkipTest PROPERTIES SKIP_RETURN_CODE 125
|
||||
FIXTURES_SETUP "Foo")
|
||||
set_tests_properties(SuccessfulTest PROPERTIES FIXTURES_REQUIRED "Foo")
|
||||
set_tests_properties(SuccessfulCleanupTest PROPERTIES FIXTURES_CLEANUP "Foo")
|
||||
]])
|
||||
run_ctest(SkipSetupTest)
|
||||
endfunction()
|
||||
run_SkipSetupTest()
|
||||
|
||||
function(run_SkipRequiredTest)
|
||||
set(CASE_CMAKELISTS_SUFFIX_CODE [[
|
||||
add_test(NAME SkipTest COMMAND ${skip_command})
|
||||
add_test(NAME SuccessfulCleanupTest COMMAND "${CMAKE_COMMAND}" --version)
|
||||
|
||||
set_tests_properties(SuccessfulTest PROPERTIES FIXTURES_SETUP "Foo")
|
||||
set_tests_properties(SkipTest PROPERTIES SKIP_RETURN_CODE 125
|
||||
FIXTURES_REQUIRED "Foo")
|
||||
set_tests_properties(SuccessfulCleanupTest PROPERTIES FIXTURES_CLEANUP "Foo")
|
||||
]])
|
||||
run_ctest(SkipRequiredTest)
|
||||
endfunction()
|
||||
run_SkipRequiredTest()
|
||||
|
||||
function(run_SkipCleanupTest)
|
||||
set(CASE_CMAKELISTS_SUFFIX_CODE [[
|
||||
add_test(NAME CleanupTest COMMAND ${skip_command})
|
||||
|
||||
set_tests_properties(SuccessfulTest PROPERTIES FIXTURES_REQUIRED "Foo")
|
||||
set_tests_properties(CleanupTest PROPERTIES SKIP_RETURN_CODE 125
|
||||
FIXTURES_CLEANUP "Foo")
|
||||
]])
|
||||
run_ctest(SkipCleanupTest)
|
||||
endfunction()
|
||||
run_SkipCleanupTest()
|
||||
11
Tests/RunCMake/ctest_skipped_test/SkipCleanupTest-stdout.txt
Normal file
11
Tests/RunCMake/ctest_skipped_test/SkipCleanupTest-stdout.txt
Normal file
@@ -0,0 +1,11 @@
|
||||
Start 1: SuccessfulTest
|
||||
1/2 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
|
||||
Start 2: CleanupTest
|
||||
2/2 Test #2: CleanupTest ......................\*\*\*\Skipped +[0-9.]+ sec
|
||||
+
|
||||
100% tests passed, 0 tests failed out of 2
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests did not run:
|
||||
.*2 \- CleanupTest \(Skipped\)
|
||||
@@ -0,0 +1,13 @@
|
||||
Start 1: SuccessfulTest
|
||||
1/3 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
|
||||
Start 2: SkipTest
|
||||
2/3 Test #2: SkipTest .........................\*\*\*\Skipped +[0-9.]+ sec
|
||||
Start 3: SuccessfulCleanupTest
|
||||
3/3 Test #3: SuccessfulCleanupTest ............ Passed +[0-9.]+ sec
|
||||
+
|
||||
100% tests passed, 0 tests failed out of 3
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests did not run:
|
||||
.*2 \- SkipTest \(Skipped\)
|
||||
13
Tests/RunCMake/ctest_skipped_test/SkipSetupTest-stdout.txt
Normal file
13
Tests/RunCMake/ctest_skipped_test/SkipSetupTest-stdout.txt
Normal file
@@ -0,0 +1,13 @@
|
||||
Start 2: SkipTest
|
||||
1/3 Test #2: SkipTest .........................\*\*\*\Skipped +[0-9.]+ sec
|
||||
Start 1: SuccessfulTest
|
||||
2/3 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
|
||||
Start 3: SuccessfulCleanupTest
|
||||
3/3 Test #3: SuccessfulCleanupTest ............ Passed +[0-9.]+ sec
|
||||
+
|
||||
100% tests passed, 0 tests failed out of 3
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests did not run:
|
||||
.*2 \- SkipTest \(Skipped\)
|
||||
11
Tests/RunCMake/ctest_skipped_test/SkipTest-stdout.txt
Normal file
11
Tests/RunCMake/ctest_skipped_test/SkipTest-stdout.txt
Normal file
@@ -0,0 +1,11 @@
|
||||
Start 1: SuccessfulTest
|
||||
1/2 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
|
||||
Start 2: SkipTest
|
||||
2/2 Test #2: SkipTest .........................\*\*\*\Skipped +[0-9.]+ sec
|
||||
+
|
||||
100% tests passed, 0 tests failed out of 2
|
||||
+
|
||||
Total Test time \(real\) = +[0-9.]+ sec
|
||||
+
|
||||
The following tests did not run:
|
||||
.*2 \- SkipTest \(Skipped\)
|
||||
1
Tests/RunCMake/ctest_skipped_test/skip.bat
Executable file
1
Tests/RunCMake/ctest_skipped_test/skip.bat
Executable file
@@ -0,0 +1 @@
|
||||
EXIT 125
|
||||
3
Tests/RunCMake/ctest_skipped_test/skip.sh
Executable file
3
Tests/RunCMake/ctest_skipped_test/skip.sh
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/bin/sh
|
||||
|
||||
exit 125
|
||||
16
Tests/RunCMake/ctest_skipped_test/test.cmake.in
Normal file
16
Tests/RunCMake/ctest_skipped_test/test.cmake.in
Normal file
@@ -0,0 +1,16 @@
|
||||
cmake_minimum_required(VERSION 3.7)
|
||||
|
||||
set(CTEST_SITE "test-site")
|
||||
set(CTEST_BUILD_NAME "test-build-name")
|
||||
set(CTEST_SOURCE_DIRECTORY "@RunCMake_BINARY_DIR@/@CASE_NAME@")
|
||||
set(CTEST_BINARY_DIRECTORY "@RunCMake_BINARY_DIR@/@CASE_NAME@-build")
|
||||
set(CTEST_CMAKE_GENERATOR "@RunCMake_GENERATOR@")
|
||||
set(CTEST_CMAKE_GENERATOR_PLATFORM "@RunCMake_GENERATOR_PLATFORM@")
|
||||
set(CTEST_CMAKE_GENERATOR_TOOLSET "@RunCMake_GENERATOR_TOOLSET@")
|
||||
set(CTEST_BUILD_CONFIGURATION "$ENV{CMAKE_CONFIG_TYPE}")
|
||||
|
||||
set(ctest_test_args "@CASE_CTEST_TEST_ARGS@")
|
||||
ctest_start(Experimental)
|
||||
ctest_configure()
|
||||
ctest_build()
|
||||
ctest_test(${ctest_test_args})
|
||||
Reference in New Issue
Block a user