From e234d9e9d701ba5a3a9bc25c4727bb68b0f9311e Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:30:42 -0400 Subject: [PATCH 01/12] fix: allow forks to build PR plugins --- .github/workflows/pr-plugin-build.yml | 99 +++-------- .github/workflows/pr-plugin-upload.yml | 228 +++++++++++++++++++++++++ deploy_to_unraid.sh | 105 ++++++++++++ 3 files changed, 353 insertions(+), 79 deletions(-) create mode 100644 .github/workflows/pr-plugin-upload.yml create mode 100755 deploy_to_unraid.sh diff --git a/.github/workflows/pr-plugin-build.yml b/.github/workflows/pr-plugin-build.yml index eee988ea4..d14013e2a 100644 --- a/.github/workflows/pr-plugin-build.yml +++ b/.github/workflows/pr-plugin-build.yml @@ -9,7 +9,7 @@ on: permissions: contents: read - pull-requests: write + pull-requests: read actions: read jobs: @@ -142,19 +142,25 @@ jobs: "${{ steps.version.outputs.txz_url }}" \ "${{ steps.version.outputs.plugin_url }}" - - name: Upload PLG to R2 + - name: Save metadata for upload workflow if: steps.changed-files.outputs.has_changes == 'true' - id: upload-plg run: | - # Upload PLG - overwrite existing for updates (consistent filename) - aws s3 cp "${{ steps.version.outputs.plugin_name }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.version.outputs.plugin_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ - --acl public-read - - echo "Uploaded PLG to: ${{ steps.version.outputs.plugin_url }}" - - - name: Upload artifacts to GitHub (backup) + cat > pr-metadata.json << EOF + { + "pr_number": ${{ github.event.pull_request.number }}, + "version": "${{ steps.version.outputs.version }}", + "pr_version": "${{ steps.version.outputs.pr_version }}", + "local_txz": "${{ steps.version.outputs.local_txz }}", + "remote_txz": "${{ steps.version.outputs.remote_txz }}", + "plugin_name": "${{ steps.version.outputs.plugin_name }}", + "changed_files": $(cat changed_files.txt | jq -R -s -c 'split("\n") | map(select(length > 0))') + } + EOF + + echo "Metadata saved:" + cat pr-metadata.json + + - name: Upload artifacts to GitHub if: steps.changed-files.outputs.has_changes == 'true' uses: actions/upload-artifact@v4 with: @@ -162,72 +168,7 @@ jobs: path: | webgui-pr-*.plg webgui-pr-*.tar.gz + pr-metadata.json + changed_files.txt retention-days: 30 - - name: Format changed files list - if: steps.changed-files.outputs.has_changes == 'true' - id: format-files - run: | - # Format the file list for the comment - echo "files<> $GITHUB_OUTPUT - cat changed_files.txt >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT - - # Debug output - echo "Changed files found:" - cat changed_files.txt - - - name: Comment on PR - if: steps.changed-files.outputs.has_changes == 'true' - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: pr-plugin - message: | - ## 🔧 PR Test Plugin Available - - A test plugin has been generated for this PR that includes the modified files. - - **Version:** `${{ steps.version.outputs.version }}` - **Build:** [View Workflow Run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) - - ### đŸ“Ĩ Installation Instructions: - - **Install via Unraid Web UI:** - 1. Go to **Plugins → Install Plugin** - 2. Copy and paste this URL: - ``` - ${{ steps.version.outputs.plugin_url }} - ``` - 3. 
Click **Install**
-
- **Alternative: Direct Download**
- - [📦 Download PLG](${{ steps.version.outputs.plugin_url }})
- - [📦 Download TXZ](${{ steps.version.outputs.txz_url }})
-
- ### ⚠️ Important Notes:
-
- - **Testing only:** This plugin is for testing PR changes
- - **Backup included:** Original files are automatically backed up
- - **Easy removal:** Files are restored when plugin is removed
- - **Conflicts:** Remove this plugin before installing production updates
-
- ### 📝 Modified Files:
-
- <details>
- <summary>Click to expand file list</summary>
-
- ```
- ${{ steps.format-files.outputs.files }}
- ```
-
- </details>
- - ### 🔄 To Remove: - - Navigate to Plugins → Installed Plugins and remove `webgui-pr-${{ steps.version.outputs.version }}`, or run: - ```bash - plugin remove webgui-pr-${{ steps.version.outputs.version }} - ``` - - --- - 🤖 This comment is automatically generated and will be updated with each new push to this PR. \ No newline at end of file diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml new file mode 100644 index 000000000..45794bc66 --- /dev/null +++ b/.github/workflows/pr-plugin-upload.yml @@ -0,0 +1,228 @@ +name: Upload PR Plugin to R2 + +on: + workflow_run: + workflows: ["Build PR Plugin"] + types: + - completed + +permissions: + contents: read + pull-requests: write + actions: read + +jobs: + upload-to-r2: + runs-on: ubuntu-latest + # Only run if the build workflow succeeded + if: ${{ github.event.workflow_run.conclusion == 'success' }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Download artifacts from build workflow + uses: actions/github-script@v7 + with: + script: | + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{ github.event.workflow_run.id }}, + }); + + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name.startsWith('webgui-pr-plugin-') + })[0]; + + if (!matchArtifact) { + core.setFailed('No artifacts found from build workflow'); + return; + } + + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + + let fs = require('fs'); + fs.writeFileSync('artifacts.zip', Buffer.from(download.data)); + + core.setOutput('artifact_name', matchArtifact.name); + + - name: Extract artifacts + run: | + unzip artifacts.zip + ls -la + + # Check if metadata exists + if [ ! 
-f "pr-metadata.json" ]; then + echo "No metadata file found, build may not have produced any changes" + echo "has_artifacts=false" >> "$GITHUB_ENV" + exit 0 + fi + + echo "has_artifacts=true" >> "$GITHUB_ENV" + + # Extract metadata + echo "Metadata contents:" + cat pr-metadata.json + + - name: Parse metadata + if: env.has_artifacts == 'true' + id: metadata + run: | + # Extract values from metadata + PR_NUMBER=$(jq -r '.pr_number' pr-metadata.json) + VERSION=$(jq -r '.version' pr-metadata.json) + PR_VERSION=$(jq -r '.pr_version' pr-metadata.json) + LOCAL_TXZ=$(jq -r '.local_txz' pr-metadata.json) + REMOTE_TXZ=$(jq -r '.remote_txz' pr-metadata.json) + PLUGIN_NAME=$(jq -r '.plugin_name' pr-metadata.json) + + # Generate R2 URLs and keys + S3_BASE_URL="${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_BASE_URL }}/pr-plugins/pr-${PR_NUMBER}" + TXZ_URL="${S3_BASE_URL}/${REMOTE_TXZ}" + PLUGIN_URL="${S3_BASE_URL}/${PLUGIN_NAME}" + TXZ_KEY="pr-plugins/pr-${PR_NUMBER}/${REMOTE_TXZ}" + PLUGIN_KEY="pr-plugins/pr-${PR_NUMBER}/${PLUGIN_NAME}" + + # Output for next steps + echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "pr_version=$PR_VERSION" >> $GITHUB_OUTPUT + echo "local_txz=$LOCAL_TXZ" >> $GITHUB_OUTPUT + echo "remote_txz=$REMOTE_TXZ" >> $GITHUB_OUTPUT + echo "plugin_name=$PLUGIN_NAME" >> $GITHUB_OUTPUT + echo "txz_url=$TXZ_URL" >> $GITHUB_OUTPUT + echo "plugin_url=$PLUGIN_URL" >> $GITHUB_OUTPUT + echo "txz_key=$TXZ_KEY" >> $GITHUB_OUTPUT + echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT + + # Also extract changed files for comment + jq -r '.changed_files[]' pr-metadata.json > changed_files.txt + echo "Changed files:" + cat changed_files.txt + + - name: Configure AWS CLI for R2 + if: env.has_artifacts == 'true' + run: | + aws configure set aws_access_key_id ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} + aws configure set aws_secret_access_key ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} + aws configure set region auto + + - name: Upload TXZ to R2 + if: env.has_artifacts == 'true' + run: | + # Upload to R2 with versioned filename + aws s3 cp "${{ steps.metadata.outputs.local_txz }}" \ + "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.txz_key }}" \ + --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + --acl public-read + + echo "Uploaded TXZ to: ${{ steps.metadata.outputs.txz_url }}" + + - name: Regenerate plugin file with correct R2 URLs + if: env.has_artifacts == 'true' + run: | + # Regenerate the plugin with the actual R2 URLs + bash .github/scripts/generate-pr-plugin.sh \ + "${{ steps.metadata.outputs.version }}" \ + "${{ steps.metadata.outputs.pr_number }}" \ + "$(echo '${{ steps.metadata.outputs.pr_version }}' | cut -d. 
-f3)" \ + "${{ steps.metadata.outputs.local_txz }}" \ + "${{ steps.metadata.outputs.remote_txz }}" \ + "${{ steps.metadata.outputs.txz_url }}" \ + "${{ steps.metadata.outputs.plugin_url }}" + + - name: Upload PLG to R2 + if: env.has_artifacts == 'true' + run: | + # Upload PLG - overwrite existing for updates + aws s3 cp "${{ steps.metadata.outputs.plugin_name }}" \ + "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.plugin_key }}" \ + --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + --acl public-read + + echo "Uploaded PLG to: ${{ steps.metadata.outputs.plugin_url }}" + + - name: Format changed files list + if: env.has_artifacts == 'true' + id: format-files + run: | + # Format the file list for the comment + echo "files<> $GITHUB_OUTPUT + cat changed_files.txt >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + + - name: Get PR info + if: env.has_artifacts == 'true' + id: pr-info + uses: actions/github-script@v7 + with: + script: | + const pr_number = ${{ steps.metadata.outputs.pr_number }}; + const pr = await github.rest.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: pr_number + }); + core.setOutput('pr_number', pr_number); + + - name: Comment on PR + if: env.has_artifacts == 'true' + uses: marocchino/sticky-pull-request-comment@v2 + with: + number: ${{ steps.pr-info.outputs.pr_number }} + header: pr-plugin + message: | + ## 🔧 PR Test Plugin Available + + A test plugin has been generated for this PR that includes the modified files. + + **Version:** `${{ steps.metadata.outputs.version }}` + **Build:** [View Workflow Run](${{ github.event.workflow_run.html_url }}) + + ### đŸ“Ĩ Installation Instructions: + + **Install via Unraid Web UI:** + 1. Go to **Plugins → Install Plugin** + 2. Copy and paste this URL: + ``` + ${{ steps.metadata.outputs.plugin_url }} + ``` + 3. Click **Install** + + **Alternative: Direct Download** + - [đŸ“Ļ Download PLG](${{ steps.metadata.outputs.plugin_url }}) + - [đŸ“Ļ Download TXZ](${{ steps.metadata.outputs.txz_url }}) + + ### âš ī¸ Important Notes: + + - **Testing only:** This plugin is for testing PR changes + - **Backup included:** Original files are automatically backed up + - **Easy removal:** Files are restored when plugin is removed + - **Conflicts:** Remove this plugin before installing production updates + + ### 📝 Modified Files: + +
+ <summary>Click to expand file list</summary>
+
+ ```
+ ${{ steps.format-files.outputs.files }}
+ ```
+
+ </details>
+
+ ### 🔄 To Remove:
+
+ Navigate to Plugins → Installed Plugins and remove `webgui-pr-${{ steps.metadata.outputs.version }}`, or run:
+ ```bash
+ plugin remove webgui-pr-${{ steps.metadata.outputs.version }}
+ ```
+
+ ---
+ 🤖 This comment is automatically generated and will be updated with each new push to this PR.
\ No newline at end of file
diff --git a/deploy_to_unraid.sh b/deploy_to_unraid.sh
new file mode 100755
index 000000000..6fe98a4f3
--- /dev/null
+++ b/deploy_to_unraid.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+
+# Deploy script for unRAID webGUI updates
+# Deploys only git-modified files to the target server
+# Usage: ./deploy_to_unraid.sh <target_host>
+
+# Show help if requested
+if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then
+ echo "Usage: $0 <target_host>"
+ echo ""
+ echo "Deploy git-modified files to unRAID server"
+ echo ""
+ echo "Arguments:"
+ echo " target_host SSH target (required)"
+ echo ""
+ echo "Examples:"
+ echo " $0 root@192.168.1.100 # Deploy to specific IP"
+ echo " $0 root@tower.local # Deploy to named host"
+ echo " $0 root@unraid.local # Deploy to unraid.local"
+ exit 0
+fi
+
+# Get target host from command line (required)
+if [ $# -eq 0 ]; then
+ echo "❌ Error: Target host required"
+ echo "Usage: $0 <target_host>"
+ echo "Example: $0 root@192.168.1.100"
+ exit 1
+fi
+
+TARGET_HOST="$1"
+echo "ℹ️ Deploying to: $TARGET_HOST"
+
+TARGET_EMHTTP="/usr/local/emhttp"
+
+echo "🚀 Deploying git-modified files to unRAID..."
+
+# Check for additional files to deploy (passed as arguments)
+ADDITIONAL_FILES=""
+if [ $# -gt 1 ]; then
+ shift # Remove the target host from arguments
+ for FILE in "$@"; do
+ if [ -f "$FILE" ]; then
+ ADDITIONAL_FILES="$ADDITIONAL_FILES$FILE\n"
+ fi
+ done
+fi
+
+# Get list of modified files from git (excluding deleted files)
+GIT_FILES=$(git diff --name-only --diff-filter=ACMR HEAD | grep -E "^emhttp/" || true)
+
+# Get list of untracked files
+UNTRACKED_FILES=$(git ls-files --others --exclude-standard | grep -E "^emhttp/" || true)
+
+# Combine all files
+FILES=""
+[ -n "$GIT_FILES" ] && FILES="$FILES$GIT_FILES\n"
+[ -n "$UNTRACKED_FILES" ] && FILES="$FILES$UNTRACKED_FILES\n"
+[ -n "$ADDITIONAL_FILES" ] && FILES="$FILES$ADDITIONAL_FILES"
+
+# Remove trailing newline and duplicates
+FILES=$(echo -e "$FILES" | grep -v '^$' | sort -u)
+
+if [ -z "$FILES" ]; then
+ echo "✅ No files to deploy"
+ exit 0
+fi
+
+echo "📋 Files to deploy:"
+echo "$FILES" | sed 's/^/ - /'
+echo ""
+
+# Create backup directory on target
+BACKUP_DIR="$TARGET_EMHTTP/backups/$(date +%Y%m%d_%H%M%S)"
+echo "📦 Creating backup directory on target..."
+ssh "$TARGET_HOST" "mkdir -p '$BACKUP_DIR'"
+
+# Deploy each file
+while IFS= read -r FILE; do
+ if [ ! -f "$FILE" ]; then
+ echo "⚠️ Warning: $FILE not found, skipping..."
+ continue
+ fi
+
+ FILENAME=$(basename "$FILE")
+ DIRNAME=$(dirname "$FILE" | sed 's/emhttp\///')
+ TARGET_PATH="$TARGET_EMHTTP/$DIRNAME/$FILENAME"
+
+ echo "📤 Deploying $FILENAME..."
+
+ # Backup existing file if it exists
+ ssh "$TARGET_HOST" "[ -f '$TARGET_PATH' ] && cp '$TARGET_PATH' '$BACKUP_DIR/$FILENAME.bak'"
+
+ # Copy the updated file
+ if scp "$FILE" "$TARGET_HOST:$TARGET_PATH"; then
+ echo "✅ $FILENAME deployed successfully"
+ else
+ echo "❌ Failed to deploy $FILENAME"
+ exit 1
+ fi
+done <<< "$FILES"
+
+echo ""
+echo "✨ Deployment complete to $TARGET_HOST!"
+echo "📝 Successfully deployed $(echo "$FILES" | wc -l | xargs) modified file(s)" \ No newline at end of file From 1a266008f6200fd5880ef1a2d5be515f2172a8c7 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:31:45 -0400 Subject: [PATCH 02/12] chore: remove added file --- deploy_to_unraid.sh | 105 -------------------------------------------- 1 file changed, 105 deletions(-) delete mode 100755 deploy_to_unraid.sh diff --git a/deploy_to_unraid.sh b/deploy_to_unraid.sh deleted file mode 100755 index 6fe98a4f3..000000000 --- a/deploy_to_unraid.sh +++ /dev/null @@ -1,105 +0,0 @@ -#!/bin/bash - -# Deploy script for unRAID webGUI updates -# Deploys only git-modified files to the target server -# Usage: ./deploy_to_unraid.sh - -# Show help if requested -if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then - echo "Usage: $0 " - echo "" - echo "Deploy git-modified files to unRAID server" - echo "" - echo "Arguments:" - echo " target_host SSH target (required)" - echo "" - echo "Examples:" - echo " $0 root@192.168.1.100 # Deploy to specific IP" - echo " $0 root@tower.local # Deploy to named host" - echo " $0 root@unraid.local # Deploy to unraid.local" - exit 0 -fi - -# Get target host from command line (required) -if [ $# -eq 0 ]; then - echo "❌ Error: Target host required" - echo "Usage: $0 " - echo "Example: $0 root@192.168.1.100" - exit 1 -fi - -TARGET_HOST="$1" -echo "â„šī¸ Deploying to: $TARGET_HOST" - -TARGET_EMHTTP="/usr/local/emhttp" - -echo "🚀 Deploying git-modified files to unRAID..." - -# Check for additional files to deploy (passed as arguments) -ADDITIONAL_FILES="" -if [ $# -gt 1 ]; then - shift # Remove the target host from arguments - for FILE in "$@"; do - if [ -f "$FILE" ]; then - ADDITIONAL_FILES="$ADDITIONAL_FILES$FILE\n" - fi - done -fi - -# Get list of modified files from git (excluding deleted files) -GIT_FILES=$(git diff --name-only --diff-filter=ACMR HEAD | grep -E "^emhttp/" || true) - -# Get list of untracked files -UNTRACKED_FILES=$(git ls-files --others --exclude-standard | grep -E "^emhttp/" || true) - -# Combine all files -FILES="" -[ -n "$GIT_FILES" ] && FILES="$FILES$GIT_FILES\n" -[ -n "$UNTRACKED_FILES" ] && FILES="$FILES$UNTRACKED_FILES\n" -[ -n "$ADDITIONAL_FILES" ] && FILES="$FILES$ADDITIONAL_FILES" - -# Remove trailing newline and duplicates -FILES=$(echo -e "$FILES" | grep -v '^$' | sort -u) - -if [ -z "$FILES" ]; then - echo "✅ No files to deploy" - exit 0 -fi - -echo "📋 Files to deploy:" -echo "$FILES" | sed 's/^/ - /' -echo "" - -# Create backup directory on target -BACKUP_DIR="$TARGET_EMHTTP/backups/$(date +%Y%m%d_%H%M%S)" -echo "đŸ“Ļ Creating backup directory on target..." -ssh "$TARGET_HOST" "mkdir -p '$BACKUP_DIR'" - -# Deploy each file -while IFS= read -r FILE; do - if [ ! -f "$FILE" ]; then - echo "âš ī¸ Warning: $FILE not found, skipping..." - continue - fi - - FILENAME=$(basename "$FILE") - DIRNAME=$(dirname "$FILE" | sed 's/emhttp\///') - TARGET_PATH="$TARGET_EMHTTP/$DIRNAME/$FILENAME" - - echo "📤 Deploying $FILENAME..." - - # Backup existing file if it exists - ssh "$TARGET_HOST" "[ -f '$TARGET_PATH' ] && cp '$TARGET_PATH' '$BACKUP_DIR/$FILENAME.bak'" - - # Copy the updated file - if scp "$FILE" "$TARGET_HOST:$TARGET_PATH"; then - echo "✅ $FILENAME deployed successfully" - else - echo "❌ Failed to deploy $FILENAME" - exit 1 - fi -done <<< "$FILES" - -echo "" -echo "✨ Deployment complete to $TARGET_HOST!" 
-echo "📝 Successfully deployed $(echo "$FILES" | wc -l | xargs) modified file(s)" \ No newline at end of file From 56eed94cbbe777033959f114f80a763136c7bd9f Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:32:09 -0400 Subject: [PATCH 03/12] chore: change main.page to trigger build --- emhttp/plugins/dynamix/Main.page | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/emhttp/plugins/dynamix/Main.page b/emhttp/plugins/dynamix/Main.page index b68dcf2f0..81d5a7c30 100644 --- a/emhttp/plugins/dynamix/Main.page +++ b/emhttp/plugins/dynamix/Main.page @@ -1,4 +1,4 @@ Menu="Tasks:1" Type="xmenu" Code="e908" -Load="30" \ No newline at end of file +Load="30" From 2cee1c26fce5ba1d10d98e3bbf9ac7114580ece3 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:37:40 -0400 Subject: [PATCH 04/12] chore: enhance artifact handling in PR plugin upload workflow --- .github/workflows/pr-plugin-upload.yml | 91 +++++++++++++++++--------- 1 file changed, 61 insertions(+), 30 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 45794bc66..be6086742 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -21,6 +21,10 @@ jobs: - name: Checkout code uses: actions/checkout@v4 + - name: Prepare artifact extraction directory + run: | + mkdir -p "${{ runner.temp }}/artifacts/" + - name: Download artifacts from build workflow uses: actions/github-script@v7 with: @@ -48,17 +52,19 @@ jobs: }); let fs = require('fs'); - fs.writeFileSync('artifacts.zip', Buffer.from(download.data)); + // Write to secure temp location + const zipPath = process.env['RUNNER_TEMP'] + '/artifacts/artifacts.zip'; + fs.writeFileSync(zipPath, Buffer.from(download.data)); core.setOutput('artifact_name', matchArtifact.name); - name: Extract artifacts run: | - unzip artifacts.zip - ls -la + unzip "${{ runner.temp }}/artifacts/artifacts.zip" -d "${{ runner.temp }}/artifacts/" + ls -la "${{ runner.temp }}/artifacts/" # Check if metadata exists - if [ ! -f "pr-metadata.json" ]; then + if [ ! 
-f "${{ runner.temp }}/artifacts/pr-metadata.json" ]; then echo "No metadata file found, build may not have produced any changes" echo "has_artifacts=false" >> "$GITHUB_ENV" exit 0 @@ -68,19 +74,19 @@ jobs: # Extract metadata echo "Metadata contents:" - cat pr-metadata.json + cat "${{ runner.temp }}/artifacts/pr-metadata.json" - name: Parse metadata if: env.has_artifacts == 'true' id: metadata run: | # Extract values from metadata - PR_NUMBER=$(jq -r '.pr_number' pr-metadata.json) - VERSION=$(jq -r '.version' pr-metadata.json) - PR_VERSION=$(jq -r '.pr_version' pr-metadata.json) - LOCAL_TXZ=$(jq -r '.local_txz' pr-metadata.json) - REMOTE_TXZ=$(jq -r '.remote_txz' pr-metadata.json) - PLUGIN_NAME=$(jq -r '.plugin_name' pr-metadata.json) + PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/pr-metadata.json") + VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/pr-metadata.json") + PR_VERSION=$(jq -r '.pr_version' "${{ runner.temp }}/artifacts/pr-metadata.json") + LOCAL_TXZ=$(jq -r '.local_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") + REMOTE_TXZ=$(jq -r '.remote_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") + PLUGIN_NAME=$(jq -r '.plugin_name' "${{ runner.temp }}/artifacts/pr-metadata.json") # Generate R2 URLs and keys S3_BASE_URL="${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_BASE_URL }}/pr-plugins/pr-${PR_NUMBER}" @@ -102,9 +108,9 @@ jobs: echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT # Also extract changed files for comment - jq -r '.changed_files[]' pr-metadata.json > changed_files.txt + jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/pr-metadata.json" > "${{ runner.temp }}/artifacts/changed_files.txt" echo "Changed files:" - cat changed_files.txt + cat "${{ runner.temp }}/artifacts/changed_files.txt" - name: Configure AWS CLI for R2 if: env.has_artifacts == 'true' @@ -115,38 +121,61 @@ jobs: - name: Upload TXZ to R2 if: env.has_artifacts == 'true' + env: + LOCAL_TXZ: ${{ steps.metadata.outputs.local_txz }} + TXZ_KEY: ${{ steps.metadata.outputs.txz_key }} + CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} + CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} + TXZ_URL: ${{ steps.metadata.outputs.txz_url }} run: | + # Copy from temp directory to working directory + cp "${{ runner.temp }}/artifacts/$LOCAL_TXZ" "./" + # Upload to R2 with versioned filename - aws s3 cp "${{ steps.metadata.outputs.local_txz }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.txz_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + aws s3 cp "$LOCAL_TXZ" \ + "s3://$CLOUDFLARE_PREVIEW_BUCKET_NAME/$TXZ_KEY" \ + --endpoint-url "$CLOUDFLARE_S3_URL" \ --acl public-read - echo "Uploaded TXZ to: ${{ steps.metadata.outputs.txz_url }}" + echo "Uploaded TXZ to: $TXZ_URL" - name: Regenerate plugin file with correct R2 URLs if: env.has_artifacts == 'true' + env: + VERSION: ${{ steps.metadata.outputs.version }} + PR_NUMBER: ${{ steps.metadata.outputs.pr_number }} + PR_VERSION: ${{ steps.metadata.outputs.pr_version }} + LOCAL_TXZ: ${{ steps.metadata.outputs.local_txz }} + REMOTE_TXZ: ${{ steps.metadata.outputs.remote_txz }} + TXZ_URL: ${{ steps.metadata.outputs.txz_url }} + PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} run: | # Regenerate the plugin with the actual R2 URLs bash .github/scripts/generate-pr-plugin.sh \ - "${{ steps.metadata.outputs.version }}" \ - "${{ steps.metadata.outputs.pr_number }}" \ - "$(echo '${{ steps.metadata.outputs.pr_version }}' | cut -d. 
-f3)" \ - "${{ steps.metadata.outputs.local_txz }}" \ - "${{ steps.metadata.outputs.remote_txz }}" \ - "${{ steps.metadata.outputs.txz_url }}" \ - "${{ steps.metadata.outputs.plugin_url }}" + "$VERSION" \ + "$PR_NUMBER" \ + "$(echo "$PR_VERSION" | cut -d. -f3)" \ + "$LOCAL_TXZ" \ + "$REMOTE_TXZ" \ + "$TXZ_URL" \ + "$PLUGIN_URL" - name: Upload PLG to R2 if: env.has_artifacts == 'true' + env: + PLUGIN_NAME: ${{ steps.metadata.outputs.plugin_name }} + PLUGIN_KEY: ${{ steps.metadata.outputs.plugin_key }} + CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} + CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} + PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} run: | # Upload PLG - overwrite existing for updates - aws s3 cp "${{ steps.metadata.outputs.plugin_name }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.metadata.outputs.plugin_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ + aws s3 cp "$PLUGIN_NAME" \ + "s3://$CLOUDFLARE_PREVIEW_BUCKET_NAME/$PLUGIN_KEY" \ + --endpoint-url "$CLOUDFLARE_S3_URL" \ --acl public-read - echo "Uploaded PLG to: ${{ steps.metadata.outputs.plugin_url }}" + echo "Uploaded PLG to: $PLUGIN_URL" - name: Format changed files list if: env.has_artifacts == 'true' @@ -154,16 +183,18 @@ jobs: run: | # Format the file list for the comment echo "files<> $GITHUB_OUTPUT - cat changed_files.txt >> $GITHUB_OUTPUT + cat "${{ runner.temp }}/artifacts/changed_files.txt" >> $GITHUB_OUTPUT echo "EOF" >> $GITHUB_OUTPUT - name: Get PR info if: env.has_artifacts == 'true' id: pr-info uses: actions/github-script@v7 + env: + PR_NUMBER: ${{ steps.metadata.outputs.pr_number }} with: script: | - const pr_number = ${{ steps.metadata.outputs.pr_number }}; + const pr_number = parseInt(process.env.PR_NUMBER); const pr = await github.rest.pulls.get({ owner: context.repo.owner, repo: context.repo.repo, From 07f841baf6aa9af407fd8552609d8f1a6664a864 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:48:31 -0400 Subject: [PATCH 05/12] chore: refactor PR plugin build workflow to generate plugin file with placeholder URLs --- .github/workflows/pr-plugin-build.yml | 28 ++++----------------------- 1 file changed, 4 insertions(+), 24 deletions(-) diff --git a/.github/workflows/pr-plugin-build.yml b/.github/workflows/pr-plugin-build.yml index d14013e2a..12e626aee 100644 --- a/.github/workflows/pr-plugin-build.yml +++ b/.github/workflows/pr-plugin-build.yml @@ -109,38 +109,18 @@ jobs: echo "Tarball contents:" tar -tzf ${{ steps.version.outputs.local_txz }} - - name: Configure AWS CLI for R2 + - name: Generate plugin file if: steps.changed-files.outputs.has_changes == 'true' run: | - aws configure set aws_access_key_id ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} - aws configure set aws_secret_access_key ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} - aws configure set region auto - - - name: Upload TXZ to R2 - if: steps.changed-files.outputs.has_changes == 'true' - id: upload-txz - run: | - # Upload to R2 with versioned filename to prevent SHA conflicts - aws s3 cp "${{ steps.version.outputs.local_txz }}" \ - "s3://${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }}/${{ steps.version.outputs.txz_key }}" \ - --endpoint-url "${{ secrets.CLOUDFLARE_S3_URL }}" \ - --acl public-read - - echo "Uploaded TXZ to: ${{ steps.version.outputs.txz_url }}" - - - name: Generate plugin file with R2 URL - if: steps.changed-files.outputs.has_changes == 'true' - run: | - # Local file is non-versioned, but remote URL is versioned - # Pass 
local filename for SHA calculation and remote filename for download + # Generate with placeholder URLs - will be updated by upload workflow bash .github/scripts/generate-pr-plugin.sh \ "${{ steps.version.outputs.version }}" \ "${{ github.event.pull_request.number }}" \ "$(git rev-parse --short HEAD)" \ "${{ steps.version.outputs.local_txz }}" \ "${{ steps.version.outputs.remote_txz }}" \ - "${{ steps.version.outputs.txz_url }}" \ - "${{ steps.version.outputs.plugin_url }}" + "PENDING_UPLOAD" \ + "PENDING_UPLOAD" - name: Save metadata for upload workflow if: steps.changed-files.outputs.has_changes == 'true' From 23a5d71e8119bc3a03b7bf734d04f822f3e2ae77 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:51:21 -0400 Subject: [PATCH 06/12] chore: revert main.page --- emhttp/plugins/dynamix/Main.page | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/emhttp/plugins/dynamix/Main.page b/emhttp/plugins/dynamix/Main.page index 81d5a7c30..b68dcf2f0 100644 --- a/emhttp/plugins/dynamix/Main.page +++ b/emhttp/plugins/dynamix/Main.page @@ -1,4 +1,4 @@ Menu="Tasks:1" Type="xmenu" Code="e908" -Load="30" +Load="30" \ No newline at end of file From 620a01fa6da2a5c62902ed530a93e3968a5a2a70 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 08:54:49 -0400 Subject: [PATCH 07/12] chore: security fixes around workflow --- .github/workflows/pr-plugin-upload.yml | 58 ++++++++++++++++---------- 1 file changed, 37 insertions(+), 21 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index be6086742..3cca1cdf6 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -60,11 +60,22 @@ jobs: - name: Extract artifacts run: | - unzip "${{ runner.temp }}/artifacts/artifacts.zip" -d "${{ runner.temp }}/artifacts/" - ls -la "${{ runner.temp }}/artifacts/" + mkdir -p "${{ runner.temp }}/artifacts/unpacked" + + # Validate archive contents before extraction + bsdtar -tf "${{ runner.temp }}/artifacts/artifacts.zip" | awk ' + /^-/ {next} + { + if ($0 ~ /^\// || $0 ~ /\.\.\//) { print "INVALID:"$0 > "/dev/stderr"; exit 1 } + } + ' + + # Safe extraction with path normalization + bsdtar -xpf "${{ runner.temp }}/artifacts/artifacts.zip" -C "${{ runner.temp }}/artifacts/unpacked" --no-same-owner --no-same-permissions + ls -la "${{ runner.temp }}/artifacts/unpacked" # Check if metadata exists - if [ ! -f "${{ runner.temp }}/artifacts/pr-metadata.json" ]; then + if [ ! 
-f "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" ]; then echo "No metadata file found, build may not have produced any changes" echo "has_artifacts=false" >> "$GITHUB_ENV" exit 0 @@ -74,19 +85,19 @@ jobs: # Extract metadata echo "Metadata contents:" - cat "${{ runner.temp }}/artifacts/pr-metadata.json" + cat "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" - name: Parse metadata if: env.has_artifacts == 'true' id: metadata run: | # Extract values from metadata - PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/pr-metadata.json") - VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/pr-metadata.json") - PR_VERSION=$(jq -r '.pr_version' "${{ runner.temp }}/artifacts/pr-metadata.json") - LOCAL_TXZ=$(jq -r '.local_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") - REMOTE_TXZ=$(jq -r '.remote_txz' "${{ runner.temp }}/artifacts/pr-metadata.json") - PLUGIN_NAME=$(jq -r '.plugin_name' "${{ runner.temp }}/artifacts/pr-metadata.json") + PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + PR_VERSION=$(jq -r '.pr_version' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + LOCAL_TXZ=$(jq -r '.local_txz' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + REMOTE_TXZ=$(jq -r '.remote_txz' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + PLUGIN_NAME=$(jq -r '.plugin_name' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") # Generate R2 URLs and keys S3_BASE_URL="${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_BASE_URL }}/pr-plugins/pr-${PR_NUMBER}" @@ -108,16 +119,9 @@ jobs: echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT # Also extract changed files for comment - jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/pr-metadata.json" > "${{ runner.temp }}/artifacts/changed_files.txt" + jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" > "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" echo "Changed files:" - cat "${{ runner.temp }}/artifacts/changed_files.txt" - - - name: Configure AWS CLI for R2 - if: env.has_artifacts == 'true' - run: | - aws configure set aws_access_key_id ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} - aws configure set aws_secret_access_key ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} - aws configure set region auto + cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" - name: Upload TXZ to R2 if: env.has_artifacts == 'true' @@ -127,9 +131,15 @@ jobs: CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} TXZ_URL: ${{ steps.metadata.outputs.txz_url }} + AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: auto + AWS_EC2_METADATA_DISABLED: true + AWS_SHARED_CREDENTIALS_FILE: /dev/null + AWS_CONFIG_FILE: /dev/null run: | # Copy from temp directory to working directory - cp "${{ runner.temp }}/artifacts/$LOCAL_TXZ" "./" + cp "${{ runner.temp }}/artifacts/unpacked/$LOCAL_TXZ" "./" # Upload to R2 with versioned filename aws s3 cp "$LOCAL_TXZ" \ @@ -168,6 +178,12 @@ jobs: CLOUDFLARE_PREVIEW_BUCKET_NAME: ${{ secrets.CLOUDFLARE_PREVIEW_BUCKET_NAME }} CLOUDFLARE_S3_URL: ${{ secrets.CLOUDFLARE_S3_URL }} PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} + AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_PREVIEW_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ 
secrets.CLOUDFLARE_PREVIEW_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: auto + AWS_EC2_METADATA_DISABLED: true + AWS_SHARED_CREDENTIALS_FILE: /dev/null + AWS_CONFIG_FILE: /dev/null run: | # Upload PLG - overwrite existing for updates aws s3 cp "$PLUGIN_NAME" \ @@ -183,7 +199,7 @@ jobs: run: | # Format the file list for the comment echo "files<> $GITHUB_OUTPUT - cat "${{ runner.temp }}/artifacts/changed_files.txt" >> $GITHUB_OUTPUT + cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" >> $GITHUB_OUTPUT echo "EOF" >> $GITHUB_OUTPUT - name: Get PR info From 3e8439cbe1418070c49473681aea0c3fac137b41 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 09:02:27 -0400 Subject: [PATCH 08/12] chore: improve PR plugin upload workflow with concurrency and enhanced error handling --- .github/workflows/pr-plugin-upload.yml | 54 ++++++++++++++++++++------ 1 file changed, 42 insertions(+), 12 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 3cca1cdf6..3ec82f46d 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -1,5 +1,9 @@ name: Upload PR Plugin to R2 +concurrency: + group: pr-plugin-${{ github.event.workflow_run.id || github.run_id }} + cancel-in-progress: true + on: workflow_run: workflows: ["Build PR Plugin"] @@ -16,6 +20,11 @@ jobs: runs-on: ubuntu-latest # Only run if the build workflow succeeded if: ${{ github.event.workflow_run.conclusion == 'success' }} + defaults: + run: + shell: bash + env: + SHELLOPTS: errexit:pipefail steps: - name: Checkout code @@ -23,6 +32,8 @@ jobs: - name: Prepare artifact extraction directory run: | + set -Eeuo pipefail + IFS=$'\n\t' mkdir -p "${{ runner.temp }}/artifacts/" - name: Download artifacts from build workflow @@ -60,6 +71,8 @@ jobs: - name: Extract artifacts run: | + set -Eeuo pipefail + IFS=$'\n\t' mkdir -p "${{ runner.temp }}/artifacts/unpacked" # Validate archive contents before extraction @@ -83,14 +96,15 @@ jobs: echo "has_artifacts=true" >> "$GITHUB_ENV" - # Extract metadata - echo "Metadata contents:" - cat "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" + # Validate metadata schema + echo "Metadata present; proceeding with schema validation." 
- name: Parse metadata if: env.has_artifacts == 'true' id: metadata run: | + set -Eeuo pipefail + IFS=$'\n\t' # Extract values from metadata PR_NUMBER=$(jq -r '.pr_number' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") VERSION=$(jq -r '.version' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") @@ -118,10 +132,15 @@ jobs: echo "txz_key=$TXZ_KEY" >> $GITHUB_OUTPUT echo "plugin_key=$PLUGIN_KEY" >> $GITHUB_OUTPUT - # Also extract changed files for comment - jq -r '.changed_files[]' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" > "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" - echo "Changed files:" - cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" + # Also extract changed files for comment (limit to 100 files) + jq -r '.changed_files[:100][]' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json" > "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" + FILE_COUNT=$(jq '.changed_files | length' "${{ runner.temp }}/artifacts/unpacked/pr-metadata.json") + if [ "$FILE_COUNT" -gt 100 ]; then + echo "Note: Showing first 100 of $FILE_COUNT changed files" + echo "truncated=true" >> $GITHUB_OUTPUT + else + echo "truncated=false" >> $GITHUB_OUTPUT + fi - name: Upload TXZ to R2 if: env.has_artifacts == 'true' @@ -138,6 +157,8 @@ jobs: AWS_SHARED_CREDENTIALS_FILE: /dev/null AWS_CONFIG_FILE: /dev/null run: | + set -Eeuo pipefail + IFS=$'\n\t' # Copy from temp directory to working directory cp "${{ runner.temp }}/artifacts/unpacked/$LOCAL_TXZ" "./" @@ -160,6 +181,8 @@ jobs: TXZ_URL: ${{ steps.metadata.outputs.txz_url }} PLUGIN_URL: ${{ steps.metadata.outputs.plugin_url }} run: | + set -Eeuo pipefail + IFS=$'\n\t' # Regenerate the plugin with the actual R2 URLs bash .github/scripts/generate-pr-plugin.sh \ "$VERSION" \ @@ -185,6 +208,8 @@ jobs: AWS_SHARED_CREDENTIALS_FILE: /dev/null AWS_CONFIG_FILE: /dev/null run: | + set -Eeuo pipefail + IFS=$'\n\t' # Upload PLG - overwrite existing for updates aws s3 cp "$PLUGIN_NAME" \ "s3://$CLOUDFLARE_PREVIEW_BUCKET_NAME/$PLUGIN_KEY" \ @@ -197,10 +222,15 @@ jobs: if: env.has_artifacts == 'true' id: format-files run: | - # Format the file list for the comment - echo "files<> $GITHUB_OUTPUT - cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT + set -Eeuo pipefail + IFS=$'\n\t' + # Format the file list for the comment with random delimiter + DELIM="FILES_$(openssl rand -hex 8)" + { + echo "files<<$DELIM" + cat "${{ runner.temp }}/artifacts/unpacked/changed_files.txt" + echo "$DELIM" + } >> "$GITHUB_OUTPUT" - name: Get PR info if: env.has_artifacts == 'true' @@ -272,4 +302,4 @@ jobs: ``` --- - 🤖 This comment is automatically generated and will be updated with each new push to this PR. \ No newline at end of file + 🤖 This comment is automatically generated and will be updated with each new push to this PR. 
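Taken together, patches 01 through 08 implement the standard two-workflow pattern for fork PRs: the `pull_request`-triggered build runs without secrets and only publishes artifacts, while the `workflow_run`-triggered upload runs trusted code from the default branch with access to the R2 credentials. Everything that crosses that boundary (the TXZ, the PLG, and `pr-metadata.json`) is attacker-controlled input, which is why the upload side validates archive paths before extraction and passes metadata through environment variables rather than interpolating it into shell. Below is a minimal sketch of how such a privileged job might also vet the metadata fields themselves before using them; the field names match `pr-metadata.json` as written by the build workflow, but the `loadPrMetadata` helper and its validation rules are illustrative assumptions, not part of these patches.

```javascript
// Sketch only: validate untrusted pr-metadata.json in the privileged job.
const fs = require('fs');

function loadPrMetadata(path) {
  const meta = JSON.parse(fs.readFileSync(path, 'utf8'));

  // pr_number must be a positive integer; it ends up in R2 key prefixes.
  if (!Number.isInteger(meta.pr_number) || meta.pr_number <= 0) {
    throw new Error(`suspicious pr_number: ${meta.pr_number}`);
  }

  // Filenames must be plain basenames: no slashes, no leading dash, so they
  // cannot traverse out of the extraction directory or act as CLI flags.
  const safeName = /^[A-Za-z0-9][A-Za-z0-9._-]*$/;
  for (const key of ['local_txz', 'remote_txz', 'plugin_name']) {
    if (typeof meta[key] !== 'string' || !safeName.test(meta[key])) {
      throw new Error(`unsafe ${key} in pr-metadata.json: ${meta[key]}`);
    }
  }
  return meta;
}

module.exports = { loadPrMetadata };
```

Rejecting anything but plain basenames keeps a malicious artifact from steering the later `aws s3 cp` and `cp` calls outside the work directory, the same concern the archive-listing checks in patches 07 and 10 address for the zip contents.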
From 194c6c146779ec8f14db1dec1924d23e0f81b98f Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 11:11:27 -0400 Subject: [PATCH 09/12] chore: enhance PR plugin upload workflow with improved concurrency handling and security measures --- .github/workflows/pr-plugin-upload.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 3ec82f46d..608fb17d2 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -1,7 +1,9 @@ name: Upload PR Plugin to R2 concurrency: - group: pr-plugin-${{ github.event.workflow_run.id || github.run_id }} + # Use the PR number from the workflow run to group uploads for the same PR + # This ensures previous in-progress uploads for the same PR are cancelled + group: pr-plugin-${{ github.event.workflow_run.pull_requests[0].number || github.event.workflow_run.head_branch }} cancel-in-progress: true on: @@ -29,6 +31,12 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 + with: + # SECURITY: Always checkout the default branch (trusted code) + # Never checkout PR code in workflow_run context + ref: ${{ github.event.repository.default_branch }} + # Ensure we're checking out the base repository, not a fork + repository: ${{ github.repository }} - name: Prepare artifact extraction directory run: | From 3bb18fb3f40ff77d9a57bf4002c3624adfa6995a Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 11:18:41 -0400 Subject: [PATCH 10/12] chore: update PR plugin upload step to allow targeting PR manually --- .github/workflows/pr-plugin-upload.yml | 70 ++++++++++++++++++++++---- 1 file changed, 59 insertions(+), 11 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 608fb17d2..5100bf25c 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -1,9 +1,9 @@ name: Upload PR Plugin to R2 concurrency: - # Use the PR number from the workflow run to group uploads for the same PR + # Use the PR number from the workflow run or manual input to group uploads for the same PR # This ensures previous in-progress uploads for the same PR are cancelled - group: pr-plugin-${{ github.event.workflow_run.pull_requests[0].number || github.event.workflow_run.head_branch }} + group: pr-plugin-${{ inputs.pr_number || github.event.workflow_run.pull_requests[0].number || github.event.workflow_run.head_branch }} cancel-in-progress: true on: @@ -11,6 +11,16 @@ on: workflows: ["Build PR Plugin"] types: - completed + workflow_dispatch: + inputs: + pr_number: + description: 'Pull Request number to build and upload' + required: true + type: string + run_id: + description: 'Workflow run ID to get artifacts from (optional, uses latest if not specified)' + required: false + type: string permissions: contents: read @@ -20,8 +30,8 @@ permissions: jobs: upload-to-r2: runs-on: ubuntu-latest - # Only run if the build workflow succeeded - if: ${{ github.event.workflow_run.conclusion == 'success' }} + # Only run if the build workflow succeeded or manual trigger + if: ${{ (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success') || github.event_name == 'workflow_dispatch' }} defaults: run: shell: bash @@ -48,10 +58,42 @@ jobs: uses: actions/github-script@v7 with: script: | + // Determine run_id based on trigger type + let run_id; + if (context.eventName === 'workflow_dispatch') { + if ('${{ inputs.run_id 
}}') { + run_id = parseInt('${{ inputs.run_id }}'); + } else { + // Get latest run for the PR + const workflowRuns = await github.rest.actions.listWorkflowRuns({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 'pr-plugin-build.yml', + status: 'success' + }); + + // Filter for runs from the specified PR + const prNumber = parseInt('${{ inputs.pr_number }}'); + const prRuns = workflowRuns.data.workflow_runs.filter(run => + run.pull_requests && run.pull_requests.some(pr => pr.number === prNumber) + ); + + if (prRuns.length === 0) { + core.setFailed(`No successful build runs found for PR #${prNumber}`); + return; + } + + run_id = prRuns[0].id; + console.log(`Using latest build run ${run_id} for PR #${prNumber}`); + } + } else { + run_id = ${{ github.event.workflow_run.id }}; + } + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ owner: context.repo.owner, repo: context.repo.repo, - run_id: ${{ github.event.workflow_run.id }}, + run_id: run_id, }); let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { @@ -84,15 +126,21 @@ jobs: mkdir -p "${{ runner.temp }}/artifacts/unpacked" # Validate archive contents before extraction - bsdtar -tf "${{ runner.temp }}/artifacts/artifacts.zip" | awk ' - /^-/ {next} + unzip -l "${{ runner.temp }}/artifacts/artifacts.zip" | awk ' + NR <= 3 || /^-/ || /^Archive:/ {next} + /files$/ {exit} { - if ($0 ~ /^\// || $0 ~ /\.\.\//) { print "INVALID:"$0 > "/dev/stderr"; exit 1 } + # Extract the filename from unzip -l output (last field) + filename = $NF + if (filename ~ /^\// || filename ~ /\.\.\//) { + print "INVALID:" filename > "/dev/stderr"; + exit 1 + } } ' - # Safe extraction with path normalization - bsdtar -xpf "${{ runner.temp }}/artifacts/artifacts.zip" -C "${{ runner.temp }}/artifacts/unpacked" --no-same-owner --no-same-permissions + # Safe extraction using unzip + unzip -o "${{ runner.temp }}/artifacts/artifacts.zip" -d "${{ runner.temp }}/artifacts/unpacked" ls -la "${{ runner.temp }}/artifacts/unpacked" # Check if metadata exists @@ -268,7 +316,7 @@ jobs: A test plugin has been generated for this PR that includes the modified files. 
**Version:** `${{ steps.metadata.outputs.version }}` - **Build:** [View Workflow Run](${{ github.event.workflow_run.html_url }}) + **Build:** [View Workflow Run](${{ github.event.workflow_run.html_url || github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) ### đŸ“Ĩ Installation Instructions: From ada892e7b81679e0d44285f6365e0eb809914a89 Mon Sep 17 00:00:00 2001 From: Eli Bosley Date: Tue, 16 Sep 2025 11:20:44 -0400 Subject: [PATCH 11/12] chore: minor fixes for PR plugin upload --- .github/workflows/pr-plugin-upload.yml | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pr-plugin-upload.yml b/.github/workflows/pr-plugin-upload.yml index 5100bf25c..eec65a7a7 100644 --- a/.github/workflows/pr-plugin-upload.yml +++ b/.github/workflows/pr-plugin-upload.yml @@ -61,8 +61,9 @@ jobs: // Determine run_id based on trigger type let run_id; if (context.eventName === 'workflow_dispatch') { - if ('${{ inputs.run_id }}') { - run_id = parseInt('${{ inputs.run_id }}'); + const inputRunId = '${{ inputs.run_id }}'; + if (inputRunId && inputRunId !== '') { + run_id = parseInt(inputRunId); } else { // Get latest run for the PR const workflowRuns = await github.rest.actions.listWorkflowRuns({ @@ -87,7 +88,13 @@ jobs: console.log(`Using latest build run ${run_id} for PR #${prNumber}`); } } else { - run_id = ${{ github.event.workflow_run.id }}; + // For workflow_run events + run_id = '${{ github.event.workflow_run.id }}'; + if (!run_id || run_id === '') { + core.setFailed('No workflow run ID available'); + return; + } + run_id = parseInt(run_id); } let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ From 30bdb182988f099aff2ea5fce9ee76a46103d736 Mon Sep 17 00:00:00 2001 From: Squidly271 Date: Tue, 16 Sep 2025 00:32:27 -0400 Subject: [PATCH 12/12] Fix: Don't allow charts to update before previous animation is complete --- emhttp/plugins/dynamix/DashStats.page | 333 ++++++++++++++++++++++---- 1 file changed, 293 insertions(+), 40 deletions(-) mode change 100644 => 100755 emhttp/plugins/dynamix/DashStats.page diff --git a/emhttp/plugins/dynamix/DashStats.page b/emhttp/plugins/dynamix/DashStats.page old mode 100644 new mode 100755 index afbbc0c7b..c9e2a4e25 --- a/emhttp/plugins/dynamix/DashStats.page +++ b/emhttp/plugins/dynamix/DashStats.page @@ -398,11 +398,11 @@ switch ($themeHelper->getThemeName()) { // $themeHelper set in DefaultPageLayout foreach ($cpus as $pair) { [$cpu1, $cpu2] = my_preg_split('/[,-]/',$pair); echo ""; - if ($is_intel_cpu && count($core_types) > 0) - $core_type = "({$core_types[$cpu1]})"; - else + if ($is_intel_cpu && count($core_types) > 0) + $core_type = "({$core_types[$cpu1]})"; + else $core_type = ""; - + if ($cpu2) echo "CPU $cpu1 $core_type - HT $cpu2 0%
0%
"; else @@ -658,7 +658,7 @@ switch ($themeHelper->getThemeName()) { // $themeHelper set in DefaultPageLayout
- _(UPS Model)_: + _(UPS Model)_: @@ -690,7 +690,7 @@ switch ($themeHelper->getThemeName()) { // $themeHelper set in DefaultPageLayout - _(UPS Load)_: + _(UPS Load)_: @@ -1438,7 +1438,7 @@ var recover = null; var options_cpu = { series:[{name:'load', data:cpu.slice()}], - chart:{height:120, type:'line', fontFamily:'clear-sans', animations:{enabled:true, easing:'linear', dynamicAnimation:{speed:1000}}, toolbar:{show:false}, zoom:{enabled:false}}, + chart:{height:120, type:'line', fontFamily:'clear-sans', animations:{enabled:true, easing:'linear', dynamicAnimation:{speed:980}}, toolbar:{show:false}, zoom:{enabled:false},events:{updated:function(){if (cpuchart.customData.updateCount == 0) {cpuchart.customData.animationPending = false}cpuchart.customData.updateCount++;},animationEnd:function(){cpuchart.customData.animationPending = false;updateCPUChart();}}}, dataLabels:{enabled:false}, tooltip:{enabled:false}, stroke:{curve:'smooth', width:1}, @@ -1451,7 +1451,7 @@ var options_cpu = { }; var options_net = { series:[{name:'receive', data:rxd.slice()},{name:'transmit', data:txd.slice()}], - chart:{height:120, type:'line', fontFamily:'clear-sans', animations:{enabled:true, easing:'linear', dynamicAnimation:{speed:1000}}, toolbar:{show:false}, zoom:{enabled:false}}, + chart:{height:120, type:'line', fontFamily:'clear-sans', animations:{enabled:true, easing:'linear', dynamicAnimation:{speed:980}}, toolbar:{show:false}, zoom:{enabled:false},events:{updated:function(){if (netchart.customData.updateCount == 0) {netchart.customData.animationPending = false}netchart.customData.updateCount++;},animationEnd:function(){netchart.customData.animationPending = false;updateNetChart();}}}, dataLabels:{enabled:false}, tooltip:{enabled:false}, stroke:{curve:'smooth', width:1}, @@ -1466,6 +1466,137 @@ var options_net = { var cpuchart = new ApexCharts(document.querySelector('#cpuchart'), options_cpu); var netchart = new ApexCharts(document.querySelector('#netchart'), options_net); +// Add custom global variable to ncharts (won't affect ApexCharts functionality) +netchart.customData = { + isVisible: false, + BrowserVisibility: true, + animationPending: false, + netAnimationInterval: null, + newData: false, + updateCount: 0, + initialized: false +}; + +cpuchart.customData = { + isVisible: false, + BrowserVisibility: true, + animationPending: false, + cpuAnimationInterval: null, + coresVisible: false, + newData: false, + updateCount: 0, + initialized: false +}; + +$(function() { + + // Visibility observer for #netchart + const netchartElement = document.querySelector('#netchart'); + const netchartObserver = new IntersectionObserver((entries) => { + entries.forEach(entry => { + if (entry.target === netchartElement) { + netchart.customData.isVisible = entry.isIntersecting; + // Reset the update count as the chart doesn't always fire animationEnd in this case + if (netchart.customData.isVisible) { + resetNetUpdateCount(); + } + } + }); + }, { + root: null, // Use viewport as root + rootMargin: '0px', // No margin + threshold: 0.1 // Trigger when 10% of element is visible + }); + + + // Start observing the netchart element + if (netchartElement) { + netchartObserver.observe(netchartElement); + } else { + console.warn('NetChart element not found for visibility observer'); + } + + // Visibility observer for #cpuchart + const cpuchartElement = document.querySelector('#cpuchart'); + const cpuchartObserver = new IntersectionObserver((entries) => { + entries.forEach(entry => { + if (entry.target === cpuchartElement) { + 
cpuchart.customData.isVisible = entry.isIntersecting; + // Reset the update count as the chart doesn't always fire animationEnd in this case + if (cpuchart.customData.isVisible) { + resetCPUUpdateCount(); + } + } + + }); + }, { + root: null, // Use viewport as root + rootMargin: '0px', // No margin + threshold: 0.1 // Trigger when 10% of element is visible + }); + + // Start observing the cpuchart element + if (cpuchartElement) { + cpuchartObserver.observe(cpuchartElement); + } else { + console.warn('CpuChart element not found for visibility observer'); + } + + // Visibility observer for the cpu core load visibility + // Set the visibility true if one or more are visible + const cpuOpenElements = document.querySelectorAll('.cpu_open'); + let cpuOpenVisibilityStates = new Map(); // Track visibility state of each element + + const cpuOpenObserver = new IntersectionObserver((entries) => { + let stateChanged = false; + + entries.forEach(entry => { + const element = entry.target; + const isVisible = entry.isIntersecting; + const wasVisible = cpuOpenVisibilityStates.get(element) || false; + + // Only update if state actually changed + if (wasVisible !== isVisible) { + cpuOpenVisibilityStates.set(element, isVisible); + stateChanged = true; + } + }); + + // Only dispatch event if at least one element changed state + if (stateChanged) { + // Check if ALL elements have the same visibility state + const allHidden = Array.from(cpuOpenVisibilityStates.values()).every(state => state === false); + + cpuchart.customData.coresVisible = !allHidden; + } + }, { + root: null, // Use viewport as root + rootMargin: '0px', // No margin + threshold: 0.1 // Trigger when 10% of element is visible + }); + + // Start observing all .cpu_open elements + cpuchart.customData.coresVisible = false; + if (cpuOpenElements.length > 0) { + cpuOpenElements.forEach(element => { + cpuOpenObserver.observe(element); + }); + } else { + console.warn('No .cpu_open elements found for visibility observer'); + } + +}); + +// Debounced resize handler to account for animationEnd not firing if ApexCharts has the viewport resized +let viewportResizeTimeout; +window.addEventListener('resize', function(){ + clearTimeout(viewportResizeTimeout); + viewportResizeTimeout = setTimeout(function(){ + resetNetUpdateCount(); + resetCPUUpdateCount(); + }, 250); // Wait 250ms after resize stops before executing +}); + if (cookie.port_select && !ports.includes(cookie.port_select)) { delete cookie.port_select; saveCookie(); @@ -1501,8 +1632,9 @@ function sanitizeMultiCookie(cookieName, delimiter, removeDuplicates=false) { function initCharts(clear) { // initialize graphs entries - var data = []; - data.cpu = data.rxd = data.txd =""; + data = []; + data.cpu = data.rxd = data.txd = ""; + var now = new Date().getTime(); if (!clear) { var c = data.cpu.split(';'); @@ -1527,10 +1659,10 @@ function initCharts(clear) { } function resetCharts() { - // prevent unlimited graph growing - cpu = cpu.slice(-cpuline); - rxd = rxd.slice(-netline); - txd = txd.slice(-netline); + // prevent unlimited graph growing - limit to 300 (5 minutes) of data + cpu = cpu.slice(-300); + rxd = rxd.slice(-300); + txd = txd.slice(-300); } function addChartCpu(load) { @@ -1558,6 +1690,122 @@ function addChartNet(rx, tx) { txd.push({x:nettime, y:tx}); } + +function resetCPUUpdateCount() { + cpuchart.customData.updateCount = 0; + cpuchart.customData.animationPending = false; +} + +function resetNetUpdateCount() { + netchart.customData.updateCount = 0; + netchart.customData.animationPending = 
false; +} +function updateCPUBarCharts() { + if (!isPageVisible()) { + return; + } + // prevent an initial JS error if the first datapoint isn't available yet + // (cpuchart.customData.newData is reset by the updateCPUChart function so can't be used) + const customData = cpuchart.customData; + if (!customData.cpuData?.cpus || typeof customData.cpuLoad === 'undefined') { + return; + } + + const cpuLoad = customData.cpuLoad; + const critical = ; + const warning = ; + + // Cache DOM elements and calculations for overall CPU load + const cpuLoadText = cpuLoad + '%'; + const cpuLoadColor = setColor(cpuLoad, critical, warning); + const cpuLoadFontColor = fontColor(cpuLoad, critical, warning); + + // Batch DOM updates for overall CPU load + const $cpuElements = $('.cpu_, .cpu'); + const $cpuAliveElements = $('#cpu_, #cpu'); + + $cpuElements.text(cpuLoadText).css({'color': cpuLoadFontColor}); + $cpuAliveElements.alive(cpuLoadText, cpuLoadColor); + + // Update individual CPU cores if they are visible + if (customData.coresVisible) { + const cpus = customData.cpuData.cpus; + + // Batch DOM updates for CPU cores + const cpuCoreUpdates = []; + const cpuAliveUpdates = []; + + cpus.forEach((cpuCore, index) => { + const coreLoad = Math.round(cpuCore.percentTotal); + const coreLoadText = coreLoad + '%'; + const coreColor = setColor(coreLoad, critical, warning); + const coreFontColor = fontColor(coreLoad, critical, warning); + + cpuCoreUpdates.push({ + selector: '.cpu' + index, + text: coreLoadText, + color: coreFontColor + }); + + cpuAliveUpdates.push({ + selector: '#cpu' + index, + text: coreLoadText, + color: coreColor + }); + }); + + // Apply all CPU core updates in batches + cpuCoreUpdates.forEach(update => { + $(update.selector).text(update.text).css({'color': update.color}); + }); + + cpuAliveUpdates.forEach(update => { + $(update.selector).alive(update.text, update.color); + }); + } +} + +function updateCPUChart() { + if (!cpuchart.customData.newData || !cpuchart.customData.isVisible || !isPageVisible()) { + return; + } + // Check if the animation is still running and a timeout hasn't been set + if (cpuchart.customData.animationPending) { + console.log(cpuchart.customData.updateCount,"cpuchart animation not finished"); + } else { + // No animation running. Clear out the timeout and update the chart + cpuchart.customData.animationPending = true; + cpuchart.customData.newData = false; + console.log(cpuchart.customData.updateCount,'cpuchart updating chart'); + cpuchart.updateSeries([{data:cpu}]); + } +} +function updateNetChart() { + if (!netchart.customData.newData || !netchart.customData.isVisible || !isPageVisible()) { + return; + } + // Check if the animation is still running and a timeout hasn't been set + if (netchart.customData.animationPending) { + console.log(netchart.customData.updateCount,"netchart animation not finished"); + } else { + // No animation running. 
Clear out the timeout and update the chart + netchart.customData.animationPending = true; + netchart.customData.newData = false; + console.log(netchart.customData.updateCount,'netchart updating chart'); + netchart.updateSeries([{data:rxd},{data:txd}]); + } +} + + +function isPageVisible() { + // Check if charts are good to go + if (netchart.customData.initialized && cpuchart.customData.initialized) { + return !document.hidden; + } else { + return false; + } +} + function toggleVPN(id,vtun) { var up = $('#vpn-active'); @@ -1671,6 +1919,8 @@ function changeCPUline(val) { cpuline = val; if (val==30) delete cookie.cpuline; else cookie.cpuline = val; saveCookie(); + // Reset the update count as the chart doesn't always fire animationEnd + resetCPUUpdateCount(); cpuchart.updateOptions({xaxis:{range:cpuline-1}}); } @@ -1678,6 +1928,8 @@ function changeNetline(val) { netline = val; if (val==30) delete cookie.netline; else cookie.netline = val; saveCookie(); + // Reset the update count as the chart doesn't always fire animationEnd + resetNetUpdateCount(); netchart.updateOptions({xaxis:{range:netline-1}}); } @@ -1738,9 +1990,10 @@ function autoscale(value,text,size,kilo) { } function update900() { - // prevent chart overflowing, reset every 15 minutes + // prevent chart overflowing, reset every 5 minutes + console.log("resetting charts"); resetCharts(); - setTimeout(update900,900000); + setTimeout(update900,300000); } function attributes(page,disk) { @@ -2343,8 +2596,11 @@ dashboard.on('message',function(msg,meta) { if (port[0] == port_select) { $('#inbound').text(port[1]); $('#outbound').text(port[2]); - addChartNet(port[3], port[4]); - netchart.updateSeries([{data:rxd},{data:txd}]); + // update the netchart but only send to ApexCharts if the chart is visible + addChartNet(port[3], port[4]); + netchart.customData.newData = true; + updateNetChart(); + break; } } @@ -2405,15 +2661,17 @@ $(function() { dashboardPing.start(); initCharts(); + cpuchart.render(); netchart.render(); + addProperties(); dropdown('enter_share'); dropdown('enter_view'); startup = false; - + // Start GraphQL CPU subscription with retry logic let cpuInitAttempts = 0, cpuRetryMs = 100; function initCpuSubscription() { @@ -2430,31 +2688,21 @@ $(function() { } } `); - + cpuSubscription = window.apolloClient.subscribe({ query: CPU_SUBSCRIPTION }).subscribe({ next: (result) => { if (result.data?.systemMetricsCpu) { - const cpuData = result.data.systemMetricsCpu; - const load = Math.round(cpuData.percentTotal); - const color = setColor(load, , ); + cpuchart.customData.cpuData = result.data.systemMetricsCpu; + cpuchart.customData.cpuLoad = Math.round(cpuchart.customData.cpuData.percentTotal); + + //update cpu chart data + addChartCpu(cpuchart.customData.cpuLoad); + cpuchart.customData.newData = true; + updateCPUBarCharts(); + updateCPUChart(); - // Update main CPU display - addChartCpu(load); - cpuchart.updateSeries([{data:cpu}]); - $('.cpu_').text(load+'%').css({'color':fontColor(load, , )}); - $('#cpu_').alive(load+'%', color); - $('.cpu').text(load+'%').css({'color':fontColor(load, , )}); - $('#cpu').alive(load+'%', color); - - // Update individual CPU cores - cpuData.cpus.forEach((cpuCore, index) => { - const coreLoad = Math.round(cpuCore.percentTotal); - const coreColor = setColor(coreLoad, , ); - $('.cpu'+index).text(coreLoad+'%').css({'color':fontColor(coreLoad, , )}); - $('#cpu'+index).alive(coreLoad+'%', coreColor); - }); } }, error: (err) => { @@ -2470,7 +2718,7 @@ $(function() { setTimeout(initCpuSubscription, 
Math.min(cpuRetryMs *= 2, 2000)); } } - + initCpuSubscription(); dashboard.start(); @@ -2489,8 +2737,13 @@ $(function() { $('#cpuline').val(cpuline); $('#netline').val(netline); $.removeCookie('lockbutton'); - // remember latest graph values + // Inhibit chart updates until DOM quiets down + setTimeout(function() { + netchart.customData.initialized = true; + cpuchart.customData.initialized = true; + },500); + // Cleanup GraphQL subscription on page unload $(window).on('beforeunload', function() { if (cpuSubscription) {
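Patch 12's chart changes all enforce one rule: never call `updateSeries()` while the previous dynamic animation (980 ms against a 1 s sample rate) is still running, otherwise ApexCharts restarts the easing on every tick and the line stutters. The patch tracks this with `animationPending`/`newData` flags in each chart's `customData` and flushes pending data from the `animationEnd` event; samples keep accumulating in the `cpu`/`rxd`/`txd` arrays in the meantime, so nothing is lost while an animation is in flight. A condensed sketch of that gating pattern follows; the flag names mirror the patch, while `makeGatedUpdater` and `renderFn` are illustrative stand-ins for the chart objects and their `updateSeries` calls.

```javascript
// Sketch only: gate chart updates on animation completion.
function makeGatedUpdater(renderFn) {
  const state = { animationPending: false, newData: false, buffer: null };

  // Called for every incoming sample (e.g. from the GraphQL subscription).
  function push(data) {
    state.buffer = data;
    state.newData = true;
    flush();
  }

  // Renders only when no animation is in flight; otherwise the sample waits.
  function flush() {
    if (!state.newData || state.animationPending) return;
    state.animationPending = true;
    state.newData = false;
    renderFn(state.buffer); // starts the next dynamic animation
  }

  // Wire this to the chart's animationEnd event to drain buffered samples.
  function onAnimationEnd() {
    state.animationPending = false;
    flush();
  }

  return { push, onAnimationEnd };
}
```

This is also why the patch resets its counters on resize and on visibility changes: if `animationEnd` never fires (chart hidden, or the viewport resized mid-animation), `animationPending` would stay latched and updates would stall, so those handlers clear the flag explicitly.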