From e234d9e9d701ba5a3a9bc25c4727bb68b0f9311e Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:30:42 -0400 Subject: [PATCH 1/9] fix: allow forks to build PR plugins --- .github/workflows/pr-plugin-build.yml | 99 +++-------- .github/workflows/pr-plugin-upload.yml | 228 +++++++++++++++++++++++++ deploy_to_unraid.sh | 105 ++++++++++++ 3 files changed, 353 insertions(+), 79 deletions(-) create mode 100644 .github/workflows/pr-plugin-upload.yml create mode 100755 deploy_to_unraid.sh diff --git a/.github/workflows/pr-plugin-build.yml b/.github/workflows/pr-plugin-build.yml index eee988ea4..d14013e2a 100644 --- a/.github/workflows/pr-plugin-build.yml +++ b/.github/workflows/pr-plugin-build.yml @@ -9,7 +9,7 @@ on: permissions: contents: read - pull-requests: write + pull-requests: read actions: read jobs: @@ -142,19 +142,25 @@ jobs: "${{ steps.version.outputs.txz_url }}" \ "${{ steps.version.outputs.plugin_url }}" - - name: Upload PLG to R2 + - name: Save metadata for upload workflow if: steps.changed-files.outputs.has_changes == 'true' - id: upload-plg run: | - # Upload PLG - overwrite existing for updates (consistent filename) - aws s3 cp "${{ steps.version.outputs.plugin_name }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.version.outputs.plugin_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ - --acl public-read - - echo "Uploaded PLG to: ${{ steps.version.outputs.plugin_url }}" - - - name: Upload artifacts to GitHub (backup) + cat > pr-metadata.json << EOF + { + "pr_number": ${{ github.event.pull_request.number }}, + "version": "${{ steps.version.outputs.version }}", + "pr_version": "${{ steps.version.outputs.pr_version }}", + "local_txz": "${{ steps.version.outputs.local_txz }}", + "remote_txz": "${{ steps.version.outputs.remote_txz }}", + "plugin_name": "${{ steps.version.outputs.plugin_name }}", + "changed_files": $(cat changed_files.txt | jq -R -s -c 'split("\n") | map(select(length > 0))') + } + EOF + + echo "Metadata saved:" + cat pr-metadata.json + + - name: Upload artifacts to GitHub if: steps.changed-files.outputs.has_changes == 'true' uses: actions/upload-artifact@v4 with: @@ -162,72 +168,7 @@ jobs: path: | webgui-pr-*.plg webgui-pr-*.tar.gz + pr-metadata.json + changed_files.txt retention-days: 30 - - name: Format changed files list - if: steps.changed-files.outputs.has_changes == 'true' - id: format-files - run: | - # Format the file list for the comment - echo "files<> $GITHUB_OUTPUT - cat changed_files.txt >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT - - # Debug output - echo "Changed files found:" - cat changed_files.txt - - - name: Comment on PR - if: steps.changed-files.outputs.has_changes == 'true' - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: pr-plugin - message: | - ## 🔧 PR Test Plugin Available - - A test plugin has been generated for this PR that includes the modified files. - - **Version:** `${{ steps.version.outputs.version }}` - **Build:** [View Workflow Run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) - - ### đŸ“Ĩ Installation Instructions: - - **Install via Unraid Web UI:** - 1. Go to **Plugins → Install Plugin** - 2. Copy and paste this URL: - ``` - ${{ steps.version.outputs.plugin_url }} - ``` - 3. 
Click **Install** - - **Alternative: Direct Download** - - [đŸ“Ļ Download PLG](${{ steps.version.outputs.plugin_url }}) - - [đŸ“Ļ Download TXZ](${{ steps.version.outputs.txz_url }}) - - ### âš ī¸ Important Notes: - - - **Testing only:** This plugin is for testing PR changes - - **Backup included:** Original files are automatically backed up - - **Easy removal:** Files are restored when plugin is removed - - **Conflicts:** Remove this plugin before installing production updates - - ### 📝 Modified Files: - -
- Click to expand file list - - ``` - ${{ steps.format-files.outputs.files }} - ``` - -
- - ### 🔄 To Remove: - - Navigate to Plugins → Installed Plugins and remove `webgui-pr-${{ steps.version.outputs.version }}`, or run: - ```bash - plugin remove webgui-pr-${{ steps.version.outputs.version }} - ``` - - --- - 🤖 This comment is automatically generated and will be updated with each new push to this PR. \ No newline at end of file diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml new file mode 100644 index 000000000..45794bc66 --- /dev/null +++ b/.github/workflows/pr-plugin-upload.yml @@ -0,0 +1,228 @@ +name: Upload PR Plugin to R2 + +on: + workflow_run: + workflows: ["Build PR Plugin"] + types: + - completed + +permissions: + contents: read + pull-requests: write + actions: read + +jobs: + upload-to-r2: + runs-on: ubuntu-latest + # Only run if the build workflow succeeded + if: ${{ github.event.workflow_run.conclusion == 'success' }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Download artifacts from build workflow + uses: actions/github-script@v7 + with: + script: | + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{ github.event.workflow_run.id }}, + }); + + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name.startsWith('webgui-pr-plugin-') + })[0]; + + if (!matchArtifact) { + core.setFailed('No artifacts found from build workflow'); + return; + } + + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + + let fs = require('fs'); + fs.writeFileSync('artifacts.zip', Buffer.from(download.data)); + + core.setOutput('artifact_name', matchArtifact.name); + + - name: Extract artifacts + run: | + unzip artifacts.zip + ls -la + + # Check if metadata exists + if [ ! 
-f "pr-metadata.json" ]; then + echo "No metadata file found, build may not have produced any changes" + echo "has_artifacts=false" >> "$GITHUB_ENV" + exit 0 + fi + + echo "has_artifacts=true" >> "$GITHUB_ENV" + + # Extract metadata + echo "Metadata contents:" + cat pr-metadata.json + + - name: Parse metadata + if: env.has_artifacts == 'true' + id: metadata + run: | + # Extract values from metadata + PR_NUMBER=$(jq -r '.pr_number' pr-metadata.json) + VERSION=$(jq -r '.version' pr-metadata.json) + PR_VERSION=$(jq -r '.pr_version' pr-metadata.json) + LOCAL_TXZ=$(jq -r '.local_txz' pr-metadata.json) + REMOTE_TXZ=$(jq -r '.remote_txz' pr-metadata.json) + PLUGIN_NAME=$(jq -r '.plugin_name' pr-metadata.json) + + # Generate R2 URLs and keys + S3_BASE_URL="${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_BASE_URL }}/pr-plugins/pr-${PR_NUMBER}" + TXZ_URL="${S3_BASE_URL}/${REMOTE_TXZ}" + PLUGIN_URL="${S3_BASE_URL}/${PLUGIN_NAME}" + TXZ_KEY="pr-plugins/pr-${PR_NUMBER}/${REMOTE_TXZ}" + PLUGIN_KEY="pr-plugins/pr-${PR_NUMBER}/${PLUGIN_NAME}" + + # Output for next steps + echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "pr_version=$PR_VERSION" >> $GITHUB_OUTPUT + echo "local_txz=$LOCAL_TXZ" >> $GITHUB_OUTPUT + echo "remote_txz=$REMOTE_TXZ" >> $GITHUB_OUTPUT + echo "plugin_name=$PLUGIN_NAME" >> $GITHUB_OUTPUT + echo "txz_url=$TXZ_URL" >> $GITHUB_OUTPUT + echo "plugin_url=$PLUGIN_URL" >> $GITHUB_OUTPUT + echo "txz_key=$TXZ_KEY" >> $GITHUB_OUTPUT + echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT + + # Also extract changed files for comment + jq -r '.changed_files[]' pr-metadata.json > changed_files.txt + echo "Changed files:" + cat changed_files.txt + + - name: Configure AWS CLI for R2 + if: env.has_artifacts == 'true' + run: | + aws configure set aws_access_key_id ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} + aws configure set aws_secret_access_key ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} + aws configure set region auto + + - name: Upload TXZ to R2 + if: env.has_artifacts == 'true' + run: | + # Upload to R2 with versioned filename + aws s3 cp "${{ steps.metadata.outputs.local_txz }}" \ + "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.txz_key }}" \ + --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + --acl public-read + + echo "Uploaded TXZ to: ${{ steps.metadata.outputs.txz_url }}" + + - name: Regenerate plugin file with correct R2 URLs + if: env.has_artifacts == 'true' + run: | + # Regenerate the plugin with the actual R2 URLs + bash .github/scripts/generate-pr-plugin.sh \ + "${{ steps.metadata.outputs.version }}" \ + "${{ steps.metadata.outputs.pr_number }}" \ + "$(echo '${{ steps.metadata.outputs.pr_version }}' | cut -d. 
-f3)" \ + "${{ steps.metadata.outputs.local_txz }}" \ + "${{ steps.metadata.outputs.remote_txz }}" \ + "${{ steps.metadata.outputs.txz_url }}" \ + "${{ steps.metadata.outputs.plugin_url }}" + + - name: Upload PLG to R2 + if: env.has_artifacts == 'true' + run: | + # Upload PLG - overwrite existing for updates + aws s3 cp "${{ steps.metadata.outputs.plugin_name }}" \ + "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.plugin_key }}" \ + --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + --acl public-read + + echo "Uploaded PLG to: ${{ steps.metadata.outputs.plugin_url }}" + + - name: Format changed files list + if: env.has_artifacts == 'true' + id: format-files + run: | + # Format the file list for the comment + echo "files<> $GITHUB_OUTPUT + cat changed_files.txt >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + + - name: Get PR info + if: env.has_artifacts == 'true' + id: pr-info + uses: actions/github-script@v7 + with: + script: | + const pr_number = ${{ steps.metadata.outputs.pr_number }}; + const pr = await github.rest.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: pr_number + }); + core.setOutput('pr_number', pr_number); + + - name: Comment on PR + if: env.has_artifacts == 'true' + uses: marocchino/sticky-pull-request-comment@v2 + with: + number: ${{ steps.pr-info.outputs.pr_number }} + header: pr-plugin + message: | + ## 🔧 PR Test Plugin Available + + A test plugin has been generated for this PR that includes the modified files. + + **Version:** `${{ steps.metadata.outputs.version }}` + **Build:** [View Workflow Run](${{ github.event.workflow_run.html_url }}) + + ### đŸ“Ĩ Installation Instructions: + + **Install via Unraid Web UI:** + 1. Go to **Plugins → Install Plugin** + 2. Copy and paste this URL: + ``` + ${{ steps.metadata.outputs.plugin_url }} + ``` + 3. Click **Install** + + **Alternative: Direct Download** + - [đŸ“Ļ Download PLG](${{ steps.metadata.outputs.plugin_url }}) + - [đŸ“Ļ Download TXZ](${{ steps.metadata.outputs.txz_url }}) + + ### âš ī¸ Important Notes: + + - **Testing only:** This plugin is for testing PR changes + - **Backup included:** Original files are automatically backed up + - **Easy removal:** Files are restored when plugin is removed + - **Conflicts:** Remove this plugin before installing production updates + + ### 📝 Modified Files: + +
+ Click to expand file list + + ``` + ${{ steps.format-files.outputs.files }} + ``` + +
+ + ### 🔄 To Remove: + + Navigate to Plugins → Installed Plugins and remove `webgui-pr-${{ steps.metadata.outputs.version }}`, or run: + ```bash + plugin remove webgui-pr-${{ steps.metadata.outputs.version }} + ``` + + --- + 🤖 This comment is automatically generated and will be updated with each new push to this PR. \ No newline at end of file diff --git a/deploy_to_unraid.sh b/deploy_to_unraid.sh new file mode 100755 index 000000000..6fe98a4f3 --- /dev/null +++ b/deploy_to_unraid.sh @@ -0,0 +1,105 @@ +#!/bin/bash + +# Deploy script for unRAID webGUI updates +# Deploys only git-modified files to the target server +# Usage: ./deploy_to_unraid.sh + +# Show help if requested +if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then + echo "Usage: $0 " + echo "" + echo "Deploy git-modified files to unRAID server" + echo "" + echo "Arguments:" + echo " target_host SSH target (required)" + echo "" + echo "Examples:" + echo " $0 root@192.168.1.100 # Deploy to specific IP" + echo " $0 root@tower.local # Deploy to named host" + echo " $0 root@unraid.local # Deploy to unraid.local" + exit 0 +fi + +# Get target host from command line (required) +if [ $# -eq 0 ]; then + echo "❌ Error: Target host required" + echo "Usage: $0 " + echo "Example: $0 root@192.168.1.100" + exit 1 +fi + +TARGET_HOST="$1" +echo "â„šī¸ Deploying to: $TARGET_HOST" + +TARGET_EMHTTP="/usr/local/emhttp" + +echo "🚀 Deploying git-modified files to unRAID..." + +# Check for additional files to deploy (passed as arguments) +ADDITIONAL_FILES="" +if [ $# -gt 1 ]; then + shift # Remove the target host from arguments + for FILE in "$@"; do + if [ -f "$FILE" ]; then + ADDITIONAL_FILES="$ADDITIONAL_FILES$FILE\n" + fi + done +fi + +# Get list of modified files from git (excluding deleted files) +GIT_FILES=$(git diff --name-only --diff-filter=ACMR HEAD | grep -E "^emhttp/" || true) + +# Get list of untracked files +UNTRACKED_FILES=$(git ls-files --others --exclude-standard | grep -E "^emhttp/" || true) + +# Combine all files +FILES="" +[ -n "$GIT_FILES" ] && FILES="$FILES$GIT_FILES\n" +[ -n "$UNTRACKED_FILES" ] && FILES="$FILES$UNTRACKED_FILES\n" +[ -n "$ADDITIONAL_FILES" ] && FILES="$FILES$ADDITIONAL_FILES" + +# Remove trailing newline and duplicates +FILES=$(echo -e "$FILES" | grep -v '^$' | sort -u) + +if [ -z "$FILES" ]; then + echo "✅ No files to deploy" + exit 0 +fi + +echo "📋 Files to deploy:" +echo "$FILES" | sed 's/^/ - /' +echo "" + +# Create backup directory on target +BACKUP_DIR="$TARGET_EMHTTP/backups/$(date +%Y%m%d_%H%M%S)" +echo "đŸ“Ļ Creating backup directory on target..." +ssh "$TARGET_HOST" "mkdir -p '$BACKUP_DIR'" + +# Deploy each file +while IFS= read -r FILE; do + if [ ! -f "$FILE" ]; then + echo "âš ī¸ Warning: $FILE not found, skipping..." + continue + fi + + FILENAME=$(basename "$FILE") + DIRNAME=$(dirname "$FILE" | sed 's/emhttp\///') + TARGET_PATH="$TARGET_EMHTTP/$DIRNAME/$FILENAME" + + echo "📤 Deploying $FILENAME..." + + # Backup existing file if it exists + ssh "$TARGET_HOST" "[ -f '$TARGET_PATH' ] && cp '$TARGET_PATH' '$BACKUP_DIR/$FILENAME.bak'" + + # Copy the updated file + if scp "$FILE" "$TARGET_HOST:$TARGET_PATH"; then + echo "✅ $FILENAME deployed successfully" + else + echo "❌ Failed to deploy $FILENAME" + exit 1 + fi +done <<< "$FILES" + +echo "" +echo "✨ Deployment complete to $TARGET_HOST!" 
+echo "📝 Successfully deployed $(echo "$FILES" | wc -l | xargs) modified file(s)" \ No newline at end of file From 1a266008f6200fd5880ef1a2d5be515f2172a8c7 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:31:45 -0400 Subject: [PATCH 2/9] chore: remove added file --- deploy_to_unraid.sh | 105 -------------------------------------------- 1 file changed, 105 deletions(-) delete mode 100755 deploy_to_unraid.sh diff --git a/deploy_to_unraid.sh b/deploy_to_unraid.sh deleted file mode 100755 index 6fe98a4f3..000000000 --- a/deploy_to_unraid.sh +++ /dev/null @@ -1,105 +0,0 @@ -#!/bin/bash - -# Deploy script for unRAID webGUI updates -# Deploys only git-modified files to the target server -# Usage: ./deploy_to_unraid.sh - -# Show help if requested -if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then - echo "Usage: $0 " - echo "" - echo "Deploy git-modified files to unRAID server" - echo "" - echo "Arguments:" - echo " target_host SSH target (required)" - echo "" - echo "Examples:" - echo " $0 root@192.168.1.100 # Deploy to specific IP" - echo " $0 root@tower.local # Deploy to named host" - echo " $0 root@unraid.local # Deploy to unraid.local" - exit 0 -fi - -# Get target host from command line (required) -if [ $# -eq 0 ]; then - echo "❌ Error: Target host required" - echo "Usage: $0 " - echo "Example: $0 root@192.168.1.100" - exit 1 -fi - -TARGET_HOST="$1" -echo "â„šī¸ Deploying to: $TARGET_HOST" - -TARGET_EMHTTP="/usr/local/emhttp" - -echo "🚀 Deploying git-modified files to unRAID..." - -# Check for additional files to deploy (passed as arguments) -ADDITIONAL_FILES="" -if [ $# -gt 1 ]; then - shift # Remove the target host from arguments - for FILE in "$@"; do - if [ -f "$FILE" ]; then - ADDITIONAL_FILES="$ADDITIONAL_FILES$FILE\n" - fi - done -fi - -# Get list of modified files from git (excluding deleted files) -GIT_FILES=$(git diff --name-only --diff-filter=ACMR HEAD | grep -E "^emhttp/" || true) - -# Get list of untracked files -UNTRACKED_FILES=$(git ls-files --others --exclude-standard | grep -E "^emhttp/" || true) - -# Combine all files -FILES="" -[ -n "$GIT_FILES" ] && FILES="$FILES$GIT_FILES\n" -[ -n "$UNTRACKED_FILES" ] && FILES="$FILES$UNTRACKED_FILES\n" -[ -n "$ADDITIONAL_FILES" ] && FILES="$FILES$ADDITIONAL_FILES" - -# Remove trailing newline and duplicates -FILES=$(echo -e "$FILES" | grep -v '^$' | sort -u) - -if [ -z "$FILES" ]; then - echo "✅ No files to deploy" - exit 0 -fi - -echo "📋 Files to deploy:" -echo "$FILES" | sed 's/^/ - /' -echo "" - -# Create backup directory on target -BACKUP_DIR="$TARGET_EMHTTP/backups/$(date +%Y%m%d_%H%M%S)" -echo "đŸ“Ļ Creating backup directory on target..." -ssh "$TARGET_HOST" "mkdir -p '$BACKUP_DIR'" - -# Deploy each file -while IFS= read -r FILE; do - if [ ! -f "$FILE" ]; then - echo "âš ī¸ Warning: $FILE not found, skipping..." - continue - fi - - FILENAME=$(basename "$FILE") - DIRNAME=$(dirname "$FILE" | sed 's/emhttp\///') - TARGET_PATH="$TARGET_EMHTTP/$DIRNAME/$FILENAME" - - echo "📤 Deploying $FILENAME..." - - # Backup existing file if it exists - ssh "$TARGET_HOST" "[ -f '$TARGET_PATH' ] && cp '$TARGET_PATH' '$BACKUP_DIR/$FILENAME.bak'" - - # Copy the updated file - if scp "$FILE" "$TARGET_HOST:$TARGET_PATH"; then - echo "✅ $FILENAME deployed successfully" - else - echo "❌ Failed to deploy $FILENAME" - exit 1 - fi -done <<< "$FILES" - -echo "" -echo "✨ Deployment complete to $TARGET_HOST!" 
-echo "📝 Successfully deployed $(echo "$FILES" | wc -l | xargs) modified file(s)" \ No newline at end of file From 56eed94cbbe777033959f114f80a763136c7bd9f Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:32:09 -0400 Subject: [PATCH 3/9] chore: change main.page to trigger build --- emhttp/plugins/dynamix/Main.page | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/emhttp/plugins/dynamix/Main.page b/emhttp/plugins/dynamix/Main.page index b68dcf2f0..81d5a7c30 100644 --- a/emhttp/plugins/dynamix/Main.page +++ b/emhttp/plugins/dynamix/Main.page @@ -1,4 +1,4 @@ Menu="Tasks:1" Type="xmenu" Code="e908" -Load="30" \ No newline at end of file +Load="30" From 2cee1c26fce5ba1d10d98e3bbf9ac7114580ece3 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:37:40 -0400 Subject: [PATCH 4/9] chore: enhance artifact handling in PR plugin upload workflow --- .github/workflows/pr-plugin-upload.yml | 91 +++++++++++++++++--------- 1 file changed, 61 insertions(+), 30 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 45794bc66..be6086742 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -21,6 +21,10 @@ jobs: - name: Checkout code uses: actions/checkout@v4 + - name: Prepare artifact extraction directory + run: | + mkdir -p "${{ runner.temp }}/artifacts/" + - name: Download artifacts from build workflow uses: actions/github-script@v7 with: @@ -48,17 +52,19 @@ jobs: }); let fs = require('fs'); - fs.writeFileSync('artifacts.zip', Buffer.from(download.data)); + // Write to secure temp location + const zipPath = process.env['RUNNER_TEMP'] + '/artifacts/artifacts.zip'; + fs.writeFileSync(zipPath, Buffer.from(download.data)); core.setOutput('artifact_name', matchArtifact.name); - name: Extract artifacts run: | - unzip artifacts.zip - ls -la + unzip "${{ runner.temp }}/artifacts/artifacts.zip" -d "${{ runner.temp }}/artifacts/" + ls -la "${{ runner.temp }}/artifacts/" # Check if metadata exists - if [ ! -f "pr-metadata.json" ]; then + if [ ! 
-f "${{ runner.temp }}/artifacts/pr-metadata.json" ]; then echo "No metadata file found, build may not have produced any changes" echo "has_artifacts=false" >> "$GITHUB_ENV" exit 0 @@ -68,19 +74,19 @@ jobs: # Extract metadata echo "Metadata contents:" - cat pr-metadata.json + cat "${{ runner.temp }}/artifacts/pr-metadata.json" - name: Parse metadata if: env.has_artifacts == 'true' id: metadata run: | # Extract values from metadata - PR_NUMBER=$(jq -r '.pr_number' pr-metadata.json) - VERSION=$(jq -r '.version' pr-metadata.json) - PR_VERSION=$(jq -r '.pr_version' pr-metadata.json) - LOCAL_TXZ=$(jq -r '.local_txz' pr-metadata.json) - REMOTE_TXZ=$(jq -r '.remote_txz' pr-metadata.json) - PLUGIN_NAME=$(jq -r '.plugin_name' pr-metadata.json) + PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/pr-metadata.json") + VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/pr-metadata.json") + PR_VERSION=$(jq -r '.pr_version' "${{ runner.temp }}/artifacts/pr-metadata.json") + LOCAL_TXZ=$(jq -r '.local_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") + REMOTE_TXZ=$(jq -r '.remote_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") + PLUGIN_NAME=$(jq -r '.plugin_name' "${{ runner.temp }}/artifacts/pr-metadata.json") # Generate R2 URLs and keys S3_BASE_URL="${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_BASE_URL }}/pr-plugins/pr-${PR_NUMBER}" @@ -102,9 +108,9 @@ jobs: echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT # Also extract changed files for comment - jq -r '.changed_files[]' pr-metadata.json > changed_files.txt + jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/pr-metadata.json" > "${{ runner.temp }}/artifacts/changed_files.txt" echo "Changed files:" - cat changed_files.txt + cat "${{ runner.temp }}/artifacts/changed_files.txt" - name: Configure AWS CLI for R2 if: env.has_artifacts == 'true' @@ -115,38 +121,61 @@ jobs: - name: Upload TXZ to R2 if: env.has_artifacts == 'true' + env: + LOCAL_TXZ: ${{ steps.metadata.outputs.local_txz }} + TXZ_KEY: ${{ steps.metadata.outputs.txz_key }} + CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} + CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} + TXZ_URL: ${{ steps.metadata.outputs.txz_url }} run: | + # Copy from temp directory to working directory + cp "${{ runner.temp }}/artifacts/$LOCAL_TXZ" "./" + # Upload to R2 with versioned filename - aws s3 cp "${{ steps.metadata.outputs.local_txz }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.txz_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + aws s3 cp "$LOCAL_TXZ" \ + "s3://$CLOUDFLARE_PREVIEW_BUCKET_NAME/$TXZ_KEY" \ + --endpoint-url "$CLOUDFLARE_S3_URL" \ --acl public-read - echo "Uploaded TXZ to: ${{ steps.metadata.outputs.txz_url }}" + echo "Uploaded TXZ to: $TXZ_URL" - name: Regenerate plugin file with correct R2 URLs if: env.has_artifacts == 'true' + env: + VERSION: ${{ steps.metadata.outputs.version }} + PR_NUMBER: ${{ steps.metadata.outputs.pr_number }} + PR_VERSION: ${{ steps.metadata.outputs.pr_version }} + LOCAL_TXZ: ${{ steps.metadata.outputs.local_txz }} + REMOTE_TXZ: ${{ steps.metadata.outputs.remote_txz }} + TXZ_URL: ${{ steps.metadata.outputs.txz_url }} + PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} run: | # Regenerate the plugin with the actual R2 URLs bash .github/scripts/generate-pr-plugin.sh \ - "${{ steps.metadata.outputs.version }}" \ - "${{ steps.metadata.outputs.pr_number }}" \ - "$(echo '${{ steps.metadata.outputs.pr_version }}' | cut -d. 
-f3)" \ - "${{ steps.metadata.outputs.local_txz }}" \ - "${{ steps.metadata.outputs.remote_txz }}" \ - "${{ steps.metadata.outputs.txz_url }}" \ - "${{ steps.metadata.outputs.plugin_url }}" + "$VERSION" \ + "$PR_NUMBER" \ + "$(echo "$PR_VERSION" | cut -d. -f3)" \ + "$LOCAL_TXZ" \ + "$REMOTE_TXZ" \ + "$TXZ_URL" \ + "$PLUGIN_URL" - name: Upload PLG to R2 if: env.has_artifacts == 'true' + env: + PLUGIN_NAME: ${{ steps.metadata.outputs.plugin_name }} + PLUGIN_KEY: ${{ steps.metadata.outputs.plugin_key }} + CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} + CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} + PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} run: | # Upload PLG - overwrite existing for updates - aws s3 cp "${{ steps.metadata.outputs.plugin_name }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.plugin_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + aws s3 cp "$PLUGIN_NAME" \ + "s3://$CLOUDFLARE_PREVIEW_BUCKET_NAME/$PLUGIN_KEY" \ + --endpoint-url "$CLOUDFLARE_S3_URL" \ --acl public-read - echo "Uploaded PLG to: ${{ steps.metadata.outputs.plugin_url }}" + echo "Uploaded PLG to: $PLUGIN_URL" - name: Format changed files list if: env.has_artifacts == 'true' @@ -154,16 +183,18 @@ jobs: run: | # Format the file list for the comment echo "files<> $GITHUB_OUTPUT - cat changed_files.txt >> $GITHUB_OUTPUT + cat "${{ runner.temp }}/artifacts/changed_files.txt" >> $GITHUB_OUTPUT echo "EOF" >> $GITHUB_OUTPUT - name: Get PR info if: env.has_artifacts == 'true' id: pr-info uses: actions/github-script@v7 + env: + PR_NUMBER: ${{ steps.metadata.outputs.pr_number }} with: script: | - const pr_number = ${{ steps.metadata.outputs.pr_number }}; + const pr_number = parseInt(process.env.PR_NUMBER); const pr = await github.rest.pulls.get({ owner: context.repo.owner, repo: context.repo.repo, From 07f841baf6aa9af407fd8552609d8f1a6664a864 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:48:31 -0400 Subject: [PATCH 5/9] chore: refactor PR plugin build workflow to generate plugin file with placeholder URLs --- .github/workflows/pr-plugin-build.yml | 28 ++++----------------------- 1 file changed, 4 insertions(+), 24 deletions(-) diff --git a/.github/workflows/pr-plugin-build.yml b/.github/workflows/pr-plugin-build.yml index d14013e2a..12e626aee 100644 --- a/.github/workflows/pr-plugin-build.yml +++ b/.github/workflows/pr-plugin-build.yml @@ -109,38 +109,18 @@ jobs: echo "Tarball contents:" tar -tzf ${{ steps.version.outputs.local_txz }} - - name: Configure AWS CLI for R2 + - name: Generate plugin file if: steps.changed-files.outputs.has_changes == 'true' run: | - aws configure set aws_access_key_id ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} - aws configure set aws_secret_access_key ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} - aws configure set region auto - - - name: Upload TXZ to R2 - if: steps.changed-files.outputs.has_changes == 'true' - id: upload-txz - run: | - # Upload to R2 with versioned filename to prevent SHA conflicts - aws s3 cp "${{ steps.version.outputs.local_txz }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.version.outputs.txz_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ - --acl public-read - - echo "Uploaded TXZ to: ${{ steps.version.outputs.txz_url }}" - - - name: Generate plugin file with R2 URL - if: steps.changed-files.outputs.has_changes == 'true' - run: | - # Local file is non-versioned, but remote URL is versioned - # Pass 
local filename for SHA calculation and remote filename for download + # Generate with placeholder URLs - will be updated by upload workflow bash .github/scripts/generate-pr-plugin.sh \ "${{ steps.version.outputs.version }}" \ "${{ github.event.pull_request.number }}" \ "$(git rev-parse --short HEAD)" \ "${{ steps.version.outputs.local_txz }}" \ "${{ steps.version.outputs.remote_txz }}" \ - "${{ steps.version.outputs.txz_url }}" \ - "${{ steps.version.outputs.plugin_url }}" + "PENDING_UPLOAD" \ + "PENDING_UPLOAD" - name: Save metadata for upload workflow if: steps.changed-files.outputs.has_changes == 'true' From 23a5d71e8119bc3a03b7bf734d04f822f3e2ae77 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:51:21 -0400 Subject: [PATCH 6/9] chore: revert main.page --- emhttp/plugins/dynamix/Main.page | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/emhttp/plugins/dynamix/Main.page b/emhttp/plugins/dynamix/Main.page index 81d5a7c30..b68dcf2f0 100644 --- a/emhttp/plugins/dynamix/Main.page +++ b/emhttp/plugins/dynamix/Main.page @@ -1,4 +1,4 @@ Menu="Tasks:1" Type="xmenu" Code="e908" -Load="30" +Load="30" \ No newline at end of file From 620a01fa6da2a5c62902ed530a93e3968a5a2a70 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:54:49 -0400 Subject: [PATCH 7/9] chore: security fixes around workflow --- .github/workflows/pr-plugin-upload.yml | 58 ++++++++++++++++---------- 1 file changed, 37 insertions(+), 21 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index be6086742..3cca1cdf6 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -60,11 +60,22 @@ jobs: - name: Extract artifacts run: | - unzip "${{ runner.temp }}/artifacts/artifacts.zip" -d "${{ runner.temp }}/artifacts/" - ls -la "${{ runner.temp }}/artifacts/" + mkdir -p "${{ runner.temp }}/artifacts/unpacked" + + # Validate archive contents before extraction + bsdtar -tf "${{ runner.temp }}/artifacts/artifacts.zip" | awk ' + /^-/ {next} + { + if ($0 ~ /^\// || $0 ~ /\.\.\//) { print "INVALID:"$0 > "/dev/stderr"; exit 1 } + } + ' + + # Safe extraction with path normalization + bsdtar -xpf "${{ runner.temp }}/artifacts/artifacts.zip" -C "${{ runner.temp }}/artifacts/unpacked" --no-same-owner --no-same-permissions + ls -la "${{ runner.temp }}/artifacts/unpacked" # Check if metadata exists - if [ ! -f "${{ runner.temp }}/artifacts/pr-metadata.json" ]; then + if [ ! 
-f "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" ]; then echo "No metadata file found, build may not have produced any changes" echo "has_artifacts=false" >> "$GITHUB_ENV" exit 0 @@ -74,19 +85,19 @@ jobs: # Extract metadata echo "Metadata contents:" - cat "${{ runner.temp }}/artifacts/pr-metadata.json" + cat "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" - name: Parse metadata if: env.has_artifacts == 'true' id: metadata run: | # Extract values from metadata - PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/pr-metadata.json") - VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/pr-metadata.json") - PR_VERSION=$(jq -r '.pr_version' "${{ runner.temp }}/artifacts/pr-metadata.json") - LOCAL_TXZ=$(jq -r '.local_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") - REMOTE_TXZ=$(jq -r '.remote_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") - PLUGIN_NAME=$(jq -r '.plugin_name' "${{ runner.temp }}/artifacts/pr-metadata.json") + PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + PR_VERSION=$(jq -r '.pr_version' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + LOCAL_TXZ=$(jq -r '.local_txz' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + REMOTE_TXZ=$(jq -r '.remote_txz' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + PLUGIN_NAME=$(jq -r '.plugin_name' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") # Generate R2 URLs and keys S3_BASE_URL="${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_BASE_URL }}/pr-plugins/pr-${PR_NUMBER}" @@ -108,16 +119,9 @@ jobs: echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT # Also extract changed files for comment - jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/pr-metadata.json" > "${{ runner.temp }}/artifacts/changed_files.txt" + jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" > "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" echo "Changed files:" - cat "${{ runner.temp }}/artifacts/changed_files.txt" - - - name: Configure AWS CLI for R2 - if: env.has_artifacts == 'true' - run: | - aws configure set aws_access_key_id ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} - aws configure set aws_secret_access_key ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} - aws configure set region auto + cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" - name: Upload TXZ to R2 if: env.has_artifacts == 'true' @@ -127,9 +131,15 @@ jobs: CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} TXZ_URL: ${{ steps.metadata.outputs.txz_url }} + AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: auto + AWS_EC2_METADATA_DISABLED: true + AWS_SHARED_CREDENTIALS_FILE: /dev/null + AWS_CONFIG_FILE: /dev/null run: | # Copy from temp directory to working directory - cp "${{ runner.temp }}/artifacts/$LOCAL_TXZ" "./" + cp "${{ runner.temp }}/artifacts/unpacked/$LOCAL_TXZ" "./" # Upload to R2 with versioned filename aws s3 cp "$LOCAL_TXZ" \ @@ -168,6 +178,12 @@ jobs: CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} + AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ 
secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: auto + AWS_EC2_METADATA_DISABLED: true + AWS_SHARED_CREDENTIALS_FILE: /dev/null + AWS_CONFIG_FILE: /dev/null run: | # Upload PLG - overwrite existing for updates aws s3 cp "$PLUGIN_NAME" \ @@ -183,7 +199,7 @@ jobs: run: | # Format the file list for the comment echo "files<> $GITHUB_OUTPUT - cat "${{ runner.temp }}/artifacts/changed_files.txt" >> $GITHUB_OUTPUT + cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" >> $GITHUB_OUTPUT echo "EOF" >> $GITHUB_OUTPUT - name: Get PR info From 3e8439cbe1418070c49473681aea0c3fac137b41 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 09:02:27 -0400 Subject: [PATCH 8/9] chore: improve PR plugin upload workflow with concurrency and enhanced error handling --- .github/workflows/pr-plugin-upload.yml | 54 ++++++++++++++++++++------ 1 file changed, 42 insertions(+), 12 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 3cca1cdf6..3ec82f46d 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -1,5 +1,9 @@ name: Upload PR Plugin to R2 +concurrency: + group: pr-plugin-${{ github.event.workflow_run.id || github.run_id }} + cancel-in-progress: true + on: workflow_run: workflows: ["Build PR Plugin"] @@ -16,6 +20,11 @@ jobs: runs-on: ubuntu-latest # Only run if the build workflow succeeded if: ${{ github.event.workflow_run.conclusion == 'success' }} + defaults: + run: + shell: bash + env: + SHELLOPTS: errexit:pipefail steps: - name: Checkout code @@ -23,6 +32,8 @@ jobs: - name: Prepare artifact extraction directory run: | + set -Eeuo pipefail + IFS=$'\n\t' mkdir -p "${{ runner.temp }}/artifacts/" - name: Download artifacts from build workflow @@ -60,6 +71,8 @@ jobs: - name: Extract artifacts run: | + set -Eeuo pipefail + IFS=$'\n\t' mkdir -p "${{ runner.temp }}/artifacts/unpacked" # Validate archive contents before extraction @@ -83,14 +96,15 @@ jobs: echo "has_artifacts=true" >> "$GITHUB_ENV" - # Extract metadata - echo "Metadata contents:" - cat "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" + # Validate metadata schema + echo "Metadata present; proceeding with schema validation." 
- name: Parse metadata if: env.has_artifacts == 'true' id: metadata run: | + set -Eeuo pipefail + IFS=$'\n\t' # Extract values from metadata PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") @@ -118,10 +132,15 @@ jobs: echo "txz_key=$TXZ_KEY" >> $GITHUB_OUTPUT echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT - # Also extract changed files for comment - jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" > "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" - echo "Changed files:" - cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" + # Also extract changed files for comment (limit to 100 files) + jq -r '.changed_files[:100][]' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" > "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" + FILE_COUNT=$(jq '.changed_files | length' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + if [ "$FILE_COUNT" -gt 100 ]; then + echo "Note: Showing first 100 of $FILE_COUNT changed files" + echo "truncated=true" >> $GITHUB_OUTPUT + else + echo "truncated=false" >> $GITHUB_OUTPUT + fi - name: Upload TXZ to R2 if: env.has_artifacts == 'true' @@ -138,6 +157,8 @@ jobs: AWS_SHARED_CREDENTIALS_FILE: /dev/null AWS_CONFIG_FILE: /dev/null run: | + set -Eeuo pipefail + IFS=$'\n\t' # Copy from temp directory to working directory cp "${{ runner.temp }}/artifacts/unpacked/$LOCAL_TXZ" "./" @@ -160,6 +181,8 @@ jobs: TXZ_URL: ${{ steps.metadata.outputs.txz_url }} PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} run: | + set -Eeuo pipefail + IFS=$'\n\t' # Regenerate the plugin with the actual R2 URLs bash .github/scripts/generate-pr-plugin.sh \ "$VERSION" \ @@ -185,6 +208,8 @@ jobs: AWS_SHARED_CREDENTIALS_FILE: /dev/null AWS_CONFIG_FILE: /dev/null run: | + set -Eeuo pipefail + IFS=$'\n\t' # Upload PLG - overwrite existing for updates aws s3 cp "$PLUGIN_NAME" \ "s3://$CLOUDFLARE_PREVIEW_BUCKET_NAME/$PLUGIN_KEY" \ @@ -197,10 +222,15 @@ jobs: if: env.has_artifacts == 'true' id: format-files run: | - # Format the file list for the comment - echo "files<> $GITHUB_OUTPUT - cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT + set -Eeuo pipefail + IFS=$'\n\t' + # Format the file list for the comment with random delimiter + DELIM="FILES_$(openssl rand -hex 8)" + { + echo "files<<$DELIM" + cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" + echo "$DELIM" + } >> "$GITHUB_OUTPUT" - name: Get PR info if: env.has_artifacts == 'true' @@ -272,4 +302,4 @@ jobs: ``` --- - 🤖 This comment is automatically generated and will be updated with each new push to this PR. \ No newline at end of file + 🤖 This comment is automatically generated and will be updated with each new push to this PR. 
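
Patch 8 above writes the multi-line `files` output with a randomly generated heredoc delimiter instead of a fixed `EOF`. Because the file list originates from an untrusted pull request, a line equal to the literal delimiter could close the heredoc early and append attacker-chosen step outputs; a random delimiter removes that possibility. Below is a minimal sketch of the pattern, runnable outside a workflow; the `/tmp` fallback for `GITHUB_OUTPUT` and the sample file list are assumptions for illustration only.

```bash
#!/usr/bin/env bash
# Sketch of the $GITHUB_OUTPUT heredoc pattern used in patch 8.
# Assumptions: the sample changed_files.txt and the /tmp fallback are
# illustrative; on a real runner GITHUB_OUTPUT is provided and the list
# comes from the downloaded build artifact.
set -Eeuo pipefail

# Outside a runner, fall back to a scratch file so the sketch stays runnable.
GITHUB_OUTPUT="${GITHUB_OUTPUT:-/tmp/github_output.txt}"

# Stand-in for the artifact's changed_files.txt.
printf '%s\n' "emhttp/plugins/dynamix/Main.page" > changed_files.txt

# A random delimiter prevents any line in the untrusted list (e.g. one that
# literally reads "EOF") from terminating the heredoc early and injecting
# additional step outputs.
DELIM="FILES_$(openssl rand -hex 8)"

{
  echo "files<<$DELIM"
  cat changed_files.txt
  echo "$DELIM"
} >> "$GITHUB_OUTPUT"

echo "Wrote multi-line 'files' output using delimiter: $DELIM"
```

Patch 9 below adds the complementary safeguard on the checkout side: in the `workflow_run` context it pins the checkout to the default branch of the base repository, so the job that holds the R2 credentials only ever executes trusted workflow code.
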
From 194c6c146779ec8f14db1dec1924d23e0f81b98f Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 11:11:27 -0400 Subject: [PATCH 9/9] chore: enhance PR plugin upload workflow with improved concurrency handling and security measures --- .github/workflows/pr-plugin-upload.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 3ec82f46d..608fb17d2 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -1,7 +1,9 @@ name: Upload PR Plugin to R2 concurrency: - group: pr-plugin-${{ github.event.workflow_run.id || github.run_id }} + # Use the PR number from the workflow run to group uploads for the same PR + # This ensures previous in-progress uploads for the same PR are cancelled + group: pr-plugin-${{ github.event.workflow_run.pull_requests[0].number || github.event.workflow_run.head_branch }} cancel-in-progress: true on: @@ -29,6 +31,12 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 + with: + # SECURITY: Always checkout the default branch (trusted code) + # Never checkout PR code in workflow_run context + ref: ${{ github.event.repository.default_branch }} + # Ensure we're checking out the base repository, not a fork + repository: ${{ github.repository }} - name: Prepare artifact extraction directory run: |