mirror of
https://github.com/Kitware/CMake.git
synced 2026-01-02 20:00:38 -06:00
ctest: add command line option to run the tests listed in a given file
Add `--tests-from-file <filename>` to run only the tests listed in the given file. Test names must match exactly; regular expressions and wildcards are not supported. The listed tests can still be filtered further with a regular expression using -R. Issue: #25455
This commit is contained in:
@@ -234,6 +234,18 @@ Run Tests
|
||||
of the test's labels (i.e. the multiple ``-LE`` labels form an ``AND``
|
||||
relationship). See `Label Matching`_.
|
||||
|
||||
.. option:: --tests-from-file <filename>
|
||||
|
||||
.. versionadded:: 3.29
|
||||
|
||||
Run tests listed in the given file.
|
||||
|
||||
This option tells CTest to run the tests which are listed in the given
|
||||
file. The file must contain one exact test name per line.
|
||||
Lines can be commented out using a ``#``.
|
||||
This option can be combined with other filtering options such as
``-R``, ``-E``, ``-L`` or ``-LE``.
|
||||
|
||||
.. option:: -FA <regex>, --fixture-exclude-any <regex>
|
||||
|
||||
Exclude fixtures matching ``<regex>`` from automatically adding any tests to
|
||||
|
||||
5
Help/release/dev/ctest-tests-from-file.rst
Normal file
5
Help/release/dev/ctest-tests-from-file.rst
Normal file
@@ -0,0 +1,5 @@
|
||||
ctest-tests-from-file
|
||||
---------------------
|
||||
|
||||
* :manual:`ctest(1)` gained the :option:`--tests-from-file <ctest
|
||||
--tests-from-file>` option to run tests named in a file.
|
||||
@@ -345,6 +345,7 @@ void cmCTestTestHandler::Initialize()
|
||||
this->ExcludeFixtureRegExp.clear();
|
||||
this->ExcludeFixtureSetupRegExp.clear();
|
||||
this->ExcludeFixtureCleanupRegExp.clear();
|
||||
this->TestListFile.clear();
|
||||
|
||||
this->TestsToRunString.clear();
|
||||
this->UseUnion = false;
|
||||
@@ -585,6 +586,10 @@ bool cmCTestTestHandler::ProcessOptions()
|
||||
if (val) {
|
||||
this->ResourceSpecFile = *val;
|
||||
}
|
||||
val = this->GetOption("TestListFile");
|
||||
if (val) {
|
||||
this->TestListFile = val;
|
||||
}
|
||||
this->SetRerunFailed(cmIsOn(this->GetOption("RerunFailed")));
|
||||
|
||||
return true;
|
||||
@@ -933,6 +938,14 @@ bool cmCTestTestHandler::ComputeTestList()
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (!this->TestsToRunByName.empty()) {
|
||||
if (this->TestsToRunByName.find(tp.Name) ==
|
||||
this->TestsToRunByName.end()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
tp.Index = cnt; // save the index into the test list for this test
|
||||
finalList.push_back(tp);
|
||||
}
|
||||
@@ -1818,6 +1831,11 @@ bool cmCTestTestHandler::GetListOfTests()
|
||||
if (this->ResourceSpecFile.empty() && specFile) {
|
||||
this->ResourceSpecFile = *specFile;
|
||||
}
|
||||
|
||||
if (!this->TestListFile.empty()) {
|
||||
this->ReadTestListFile(this->TestListFile);
|
||||
}
|
||||
|
||||
cmCTestOptionalLog(this->CTest, HANDLER_VERBOSE_OUTPUT,
|
||||
"Done constructing a list of tests" << std::endl,
|
||||
this->Quiet);
|
||||
@@ -1986,6 +2004,29 @@ void cmCTestTestHandler::ExpandTestsToRunInformationForRerunFailed()
|
||||
}
|
||||
}
|
||||
|
||||
void cmCTestTestHandler::ReadTestListFile(const std::string& testListFileName)
|
||||
{
|
||||
cmsys::ifstream ifs(testListFileName.c_str());
|
||||
if (ifs) {
|
||||
std::string line;
|
||||
while (cmSystemTools::GetLineFromStream(ifs, line)) {
|
||||
std::string trimmed = cmTrimWhitespace(line);
|
||||
if (trimmed.empty() || (trimmed[0] == '#')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this->TestsToRunByName.insert(trimmed);
|
||||
}
|
||||
ifs.close();
|
||||
} else if (!this->CTest->GetShowOnly() &&
|
||||
!this->CTest->ShouldPrintLabels()) {
|
||||
cmCTestLog(this->CTest, ERROR_MESSAGE,
|
||||
"Problem reading test list file: "
|
||||
<< testListFileName
|
||||
<< " while generating list of tests to run." << std::endl);
|
||||
}
|
||||
}
|
||||
|
||||
void cmCTestTestHandler::RecordCustomTestMeasurements(cmXMLWriter& xml,
|
||||
std::string content)
|
||||
{
|
||||
|
||||
@@ -341,6 +341,7 @@ private:
|
||||
std::string GetTestStatus(cmCTestTestResult const&);
|
||||
void ExpandTestsToRunInformation(size_t numPossibleTests);
|
||||
void ExpandTestsToRunInformationForRerunFailed();
|
||||
void ReadTestListFile(const std::string& testListFileName);
|
||||
|
||||
std::vector<std::string> CustomPreTest;
|
||||
std::vector<std::string> CustomPostTest;
|
||||
@@ -359,6 +360,8 @@ private:
|
||||
std::vector<cmsys::RegularExpression> ExcludeLabelRegularExpressions;
|
||||
cmsys::RegularExpression IncludeTestsRegularExpression;
|
||||
cmsys::RegularExpression ExcludeTestsRegularExpression;
|
||||
std::string TestListFile;
|
||||
std::set<std::string> TestsToRunByName;
|
||||
|
||||
std::string ResourceSpecFile;
|
||||
|
||||
|
||||
@@ -2224,6 +2224,13 @@ bool cmCTest::HandleCommandLineArguments(size_t& i,
|
||||
args[i]);
|
||||
}
|
||||
|
||||
else if (this->CheckArgument(arg, "--tests-from-file"_s) &&
|
||||
i < args.size() - 1) {
|
||||
i++;
|
||||
this->GetTestHandler()->SetPersistentOption("TestListFile", args[i]);
|
||||
this->GetMemCheckHandler()->SetPersistentOption("TestListFile", args[i]);
|
||||
}
|
||||
|
||||
else if (this->CheckArgument(arg, "--rerun-failed"_s)) {
|
||||
this->GetTestHandler()->SetPersistentOption("RerunFailed", "true");
|
||||
this->GetMemCheckHandler()->SetPersistentOption("RerunFailed", "true");
|
||||
|
||||
@@ -107,6 +107,7 @@ const cmDocumentationEntry cmDocumentationOptions[] = {
|
||||
"Run a specific number of tests by number." },
|
||||
{ "-U, --union", "Take the Union of -I and -R" },
|
||||
{ "--rerun-failed", "Run only the tests that failed previously" },
|
||||
{ "--tests-from-file <file>", "Run the tests listed in the given file" },
|
||||
{ "--repeat until-fail:<n>, --repeat-until-fail <n>",
|
||||
"Require each test to run <n> times without failing in order to pass" },
|
||||
{ "--repeat until-pass:<n>",
|
||||
|
||||
@@ -207,6 +207,23 @@ set_tests_properties(test1 PROPERTIES SKIP_REGULAR_EXPRESSION \"test1\")
|
||||
endfunction()
|
||||
run_SkipRegexFoundTest()
|
||||
|
||||
|
||||
function(run_TestsFromFileTest arg)
|
||||
set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/TestsFromFile)
|
||||
set(RunCMake_TEST_NO_CLEAN 1)
|
||||
file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
|
||||
file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
|
||||
|
||||
file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
|
||||
add_test(Test1 \"${CMAKE_COMMAND}\" -E echo \"test1\")
|
||||
add_test(Test2 \"${CMAKE_COMMAND}\" -E echo \"test2\")
|
||||
add_test(Test11 \"${CMAKE_COMMAND}\" -E echo \"test11\")
|
||||
")
|
||||
run_cmake_command(TestsFromFile-${arg} ${CMAKE_CTEST_COMMAND} --${arg} ${RunCMake_SOURCE_DIR}/TestsFromFile-TestList.txt )
|
||||
endfunction()
|
||||
run_TestsFromFileTest(tests-from-file)
|
||||
|
||||
|
||||
function(run_SerialFailed)
|
||||
set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/SerialFailed)
|
||||
set(RunCMake_TEST_NO_CLEAN 1)
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
Test1
|
||||
# Test11
|
||||
@@ -0,0 +1,5 @@
|
||||
+Start 1: Test1
|
||||
1/1 Test #1: Test1 ............................ Passed +[0-9.]+ sec
|
||||
+
|
||||
100% tests passed, 0 tests failed out of 1
|
||||
+
|
||||
Reference in New Issue
Block a user