ci: fix unwanted workflow skip in the cron pipelines (#2117)

* ci: run workflow if not found in the list

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: suffix event in cache key to separate cron and push pipelines

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: prefix test workflow with test

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: create empty cache dir for unit tests

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

---------

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
Author: Sawjan Gurung
Date:   2026-01-08 14:12:40 +05:45 (committed by GitHub)
Parent: 186bbc85d4
Commit: b2c0e8d4db

3 changed files with 21 additions and 5 deletions


@@ -859,6 +859,7 @@ def testOpencloud(ctx):
     prefixStepCommands(pipeline, [
         ". ./.woodpecker.env",
+        "mkdir -p cache",
         '[ "$SKIP_WORKFLOW" = "true" ] && exit 0',
     ])
 
     return [pipeline]
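
With this change every step of the unit-test pipeline starts with the same three commands. A minimal sketch of the resulting step preamble, assuming prefixStepCommands simply prepends these lines to each step's own commands:

#!/bin/sh
# Sketch of the effective per-step preamble (assumes prefixStepCommands
# prepends these lines before each step's own commands).
. ./.woodpecker.env                      # may define SKIP_WORKFLOW=true
mkdir -p cache                           # unit tests expect the dir even when no cache was restored
[ "$SKIP_WORKFLOW" = "true" ] && exit 0  # workflow already passed in the previous pipeline

echo "... the step's real commands run here ..."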
@@ -1744,7 +1745,7 @@ def multiServiceE2ePipeline(ctx):
     uploadTracingResult(ctx)
 
     pipeline = {
-        "name": "e2e-tests-multi-service%s" % ("-watchfs" if watch_fs_enabled else ""),
+        "name": "test-e2e-multi-service%s" % ("-watchfs" if watch_fs_enabled else ""),
         "steps": steps,
         "depends_on": getPipelineNames(buildOpencloudBinaryForTesting(ctx) + buildWebCache(ctx)),
         "when": e2e_trigger,


@@ -3,11 +3,20 @@ const fs = require("fs");
 const CI_REPO_NAME = process.env.CI_REPO_NAME;
 const CI_COMMIT_SHA = process.env.CI_COMMIT_SHA;
 const CI_WORKFLOW_NAME = process.env.CI_WORKFLOW_NAME;
+const CI_PIPELINE_EVENT = process.env.CI_PIPELINE_EVENT;
 
 const opencloudBuildWorkflow = "build-opencloud-for-testing";
 const webCacheWorkflows = ["cache-web", "cache-web-pnpm", "cache-browsers"];
-const INFO_URL = `https://s3.ci.opencloud.eu/public/${CI_REPO_NAME}/pipelines/${CI_COMMIT_SHA}/pipeline_info.json`;
+const INFO_URL = `https://s3.ci.opencloud.eu/public/${CI_REPO_NAME}/pipelines/${CI_COMMIT_SHA}-${CI_PIPELINE_EVENT}/pipeline_info.json`;
 
+function getWorkflowNames(workflows) {
+  const allWorkflows = [];
+  for (const workflow of workflows) {
+    allWorkflows.push(workflow.name);
+  }
+  return allWorkflows;
+}
+
 function getFailedWorkflows(workflows) {
   const failedWorkflows = [];
@@ -59,6 +68,7 @@
     process.exit(0);
   }
 
+  const allWorkflows = getWorkflowNames(info.workflows);
   const failedWorkflows = getFailedWorkflows(info.workflows);
 
   // NOTE: implement for test pipelines only for now
@@ -78,6 +88,9 @@
   //   process.exit(0);
   // }
 
+  if (!allWorkflows.includes(CI_WORKFLOW_NAME)) {
+    process.exit(0);
+  }
   if (!failedWorkflows.includes(CI_WORKFLOW_NAME)) {
     console.log("[INFO] Workflow passed in previous pipeline. Skip...");
     fs.appendFileSync(".woodpecker.env", "SKIP_WORKFLOW=true\n");
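
Taken together, the two checks mean a workflow is skipped only when it is both present in the previous pipeline's workflow list and not among the failed ones; a workflow missing from the list (e.g. one that only exists in the cron pipeline) now runs instead of being silently skipped. A rough shell re-statement of that decision, for illustration only; the real implementation is the Node script above and the workflow names below are made up:

#!/bin/sh
# Rough re-statement of the skip decision (illustrative; names are made up).
ALL_WORKFLOWS="test-unit test-e2e"          # workflows seen in the previous pipeline
FAILED_WORKFLOWS="test-e2e"                 # subset that failed there
CI_WORKFLOW_NAME="test-e2e-multi-service"   # workflow being evaluated now

case " $ALL_WORKFLOWS " in
  *" $CI_WORKFLOW_NAME "*) ;;               # known workflow: check its previous result
  *) echo "not in previous pipeline -> run it"; exit 0 ;;   # the new guard
esac

case " $FAILED_WORKFLOWS " in
  *" $CI_WORKFLOW_NAME "*) echo "failed previously -> run it" ;;
  *) echo "SKIP_WORKFLOW=true" >> .woodpecker.env ;;        # passed previously -> skip
esac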


@@ -2,10 +2,12 @@
 set -e
 
+CACHE_KEY="$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA-$CI_PIPELINE_EVENT"
+
 mc alias set s3 $MC_HOST $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY
 
 # check previous pipeline
-URL="https://s3.ci.opencloud.eu/$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA/prev_pipeline"
+URL="https://s3.ci.opencloud.eu/$CACHE_KEY/prev_pipeline"
 status=$(curl -s -o prev_pipeline "$URL" -w '%{http_code}')
 
 if [ "$status" == "200" ];
@@ -19,9 +21,9 @@ then
     exit 1
   fi
   # update previous pipeline info
-  mc cp -a pipeline_info.json "s3/$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA/"
+  mc cp -a pipeline_info.json "s3/$CACHE_KEY/"
 fi
 
 # upload current pipeline number for the next pipeline
 echo "PREV_PIPELINE_NUMBER=$CI_PIPELINE_NUMBER" > prev_pipeline
-mc cp -a prev_pipeline "s3/$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA/"
+mc cp -a prev_pipeline "s3/$CACHE_KEY/"
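
Because the key now embeds the pipeline event, push and cron pipelines for the same commit read and write separate objects and no longer clobber each other's state. A small sketch with placeholder values:

#!/bin/sh
# Same commit, different events -> different cache keys (placeholder values).
PUBLIC_BUCKET="public"; CI_REPO_NAME="opencloud"; CI_COMMIT_SHA="b2c0e8d"

for CI_PIPELINE_EVENT in push cron; do
  echo "$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA-$CI_PIPELINE_EVENT"
done
# prints:
#   public/opencloud/pipelines/b2c0e8d-push
#   public/opencloud/pipelines/b2c0e8d-cron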