Mirror of https://github.com/unraid/api.git (synced 2026-01-04 07:29:48 -06:00)

Compare commits: feat/trans... to 4.22.2-bui... (49 commits)
| SHA1 |
|---|
| 1d9ce0aa3d |
| 9714b21c5c |
| 44b4d77d80 |
| 3f5039c342 |
| 1d2c6701ce |
| 0ee09aefbb |
| c60a51dc1b |
| c4fbf698b4 |
| 00faa8f9d9 |
| 45d9d65c13 |
| 771014b005 |
| 31a255c928 |
| 167857a323 |
| b80988aaab |
| fe4a6451f1 |
| 9a86c615da |
| 25ff8992a5 |
| 45fb53d040 |
| c855caa9b2 |
| ba4a43aec8 |
| c4ca761dfc |
| 01d353fa08 |
| 4a07953457 |
| 0b20e3ea9f |
| 3f4af09db5 |
| 222ced7518 |
| 03dae7ce66 |
| 0990b898bd |
| 95faeaa2f3 |
| b49ef5a762 |
| c782cf0e87 |
| f95ca9c9cb |
| a59b363ebc |
| 2fef10c94a |
| 1c73a4af42 |
| 88a924c84f |
| ae4d3ecbc4 |
| c569043ab5 |
| 50ea2a3ffb |
| b518131406 |
| e57d81e073 |
| 88baddd6c0 |
| abc22bdb87 |
| 6ed2f5ce8e |
| b79b44e95c |
| ca22285a26 |
| 838be2c52e |
| 73c1100d0b |
| 434e331384 |
@@ -1,123 +1,3 @@
 {
-  "permissions": {
-    "allow": [
-      "# Development Commands",
-      "Bash(pnpm install)",
-      "Bash(pnpm dev)",
-      "Bash(pnpm build)",
-      "Bash(pnpm test)",
-      "Bash(pnpm test:*)",
-      "Bash(pnpm lint)",
-      "Bash(pnpm lint:fix)",
-      "Bash(pnpm type-check)",
-      "Bash(pnpm codegen)",
-      "Bash(pnpm storybook)",
-      "Bash(pnpm --filter * dev)",
-      "Bash(pnpm --filter * build)",
-      "Bash(pnpm --filter * test)",
-      "Bash(pnpm --filter * lint)",
-      "Bash(pnpm --filter * codegen)",
-
-      "# Git Commands (read-only)",
-      "Bash(git status)",
-      "Bash(git diff)",
-      "Bash(git log)",
-      "Bash(git branch)",
-      "Bash(git remote -v)",
-
-      "# Search Commands",
-      "Bash(rg *)",
-
-      "# File System (read-only)",
-      "Bash(ls)",
-      "Bash(ls -la)",
-      "Bash(pwd)",
-      "Bash(find . -name)",
-      "Bash(find . -type)",
-
-      "# Node/NPM Commands",
-      "Bash(node --version)",
-      "Bash(pnpm --version)",
-      "Bash(npx --version)",
-
-      "# Environment Commands",
-      "Bash(echo $*)",
-      "Bash(which *)",
-
-      "# Process Commands",
-      "Bash(ps aux | grep)",
-      "Bash(lsof -i)",
-
-      "# Documentation Domains",
-      "WebFetch(domain:tailwindcss.com)",
-      "WebFetch(domain:github.com)",
-      "WebFetch(domain:reka-ui.com)",
-      "WebFetch(domain:nodejs.org)",
-      "WebFetch(domain:pnpm.io)",
-      "WebFetch(domain:vitejs.dev)",
-      "WebFetch(domain:nuxt.com)",
-      "WebFetch(domain:nestjs.com)",
-
-      "# IDE Integration",
-      "mcp__ide__getDiagnostics",
-
-      "# Browser MCP (for testing)",
-      "mcp__browsermcp__browser_navigate",
-      "mcp__browsermcp__browser_click",
-      "mcp__browsermcp__browser_screenshot"
-    ],
-    "deny": [
-      "# Dangerous Commands",
-      "Bash(rm -rf)",
-      "Bash(chmod 777)",
-      "Bash(curl)",
-      "Bash(wget)",
-      "Bash(ssh)",
-      "Bash(scp)",
-      "Bash(sudo)",
-      "Bash(su)",
-      "Bash(pkill)",
-      "Bash(kill)",
-      "Bash(killall)",
-      "Bash(python)",
-      "Bash(python3)",
-      "Bash(pip)",
-      "Bash(npm)",
-      "Bash(yarn)",
-      "Bash(apt)",
-      "Bash(brew)",
-      "Bash(systemctl)",
-      "Bash(service)",
-      "Bash(docker)",
-      "Bash(docker-compose)",
-
-      "# File Modification (use Edit/Write tools instead)",
-      "Bash(sed)",
-      "Bash(awk)",
-      "Bash(perl)",
-      "Bash(echo > *)",
-      "Bash(echo >> *)",
-      "Bash(cat > *)",
-      "Bash(cat >> *)",
-      "Bash(tee)",
-
-      "# Git Write Commands (require explicit user action)",
-      "Bash(git add)",
-      "Bash(git commit)",
-      "Bash(git push)",
-      "Bash(git pull)",
-      "Bash(git merge)",
-      "Bash(git rebase)",
-      "Bash(git checkout)",
-      "Bash(git reset)",
-      "Bash(git clean)",
-
-      "# Package Management Write Commands",
-      "Bash(pnpm add)",
-      "Bash(pnpm remove)",
-      "Bash(pnpm update)",
-      "Bash(pnpm upgrade)"
-    ]
-  },
-  "enableAllProjectMcpServers": false
+  "permissions": {}
 }
.github/workflows/build-plugin.yml (vendored), 60 changes
@@ -51,21 +51,16 @@ jobs:
         with:
           fetch-depth: 0

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

       - name: Get API Version
         id: vars
@@ -76,14 +71,6 @@ jobs:
           API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
           echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
-
       - name: Install dependencies
         run: |
           cd ${{ github.workspace }}
@@ -183,3 +170,40 @@ jobs:
           ```
           ${{ inputs.BASE_URL }}/tag/${{ inputs.TAG }}/dynamix.unraid.net.plg
           ```
+
+      - name: Clean up old preview builds
+        if: inputs.RELEASE_CREATED == 'false' && github.event_name == 'push'
+        continue-on-error: true
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: auto
+        run: |
+          echo "🧹 Cleaning up old preview builds (keeping last 7 days)..."
+
+          # Calculate cutoff date (7 days ago)
+          CUTOFF_DATE=$(date -d "7 days ago" +"%Y.%m.%d")
+          echo "Deleting builds older than: ${CUTOFF_DATE}"
+
+          # List and delete old timestamped .txz files
+          OLD_FILES=$(aws s3 ls "s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/" \
+            --endpoint-url ${{ secrets.CF_ENDPOINT }} --recursive | \
+            grep -E "dynamix\.unraid\.net-[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]{4}\.txz" | \
+            awk '{print $4}' || true)
+
+          DELETED_COUNT=0
+          if [ -n "$OLD_FILES" ]; then
+            while IFS= read -r file; do
+              if [[ $file =~ ([0-9]{4}\.[0-9]{2}\.[0-9]{2})\.[0-9]{4}\.txz ]]; then
+                FILE_DATE="${BASH_REMATCH[1]}"
+                if [[ "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
+                  echo "Deleting old build: $(basename "$file")"
+                  aws s3 rm "s3://${{ secrets.CF_BUCKET_PREVIEW }}/${file}" \
+                    --endpoint-url ${{ secrets.CF_ENDPOINT }} || true
+                  ((DELETED_COUNT++))
+                fi
+              fi
+            done <<< "$OLD_FILES"
+          fi
+
+          echo "✅ Deleted ${DELETED_COUNT} old builds"
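The cleanup step above works because the build stamps are zero-padded `YYYY.MM.DD` strings, so a plain string comparison orders them by date. A minimal TypeScript sketch of the same retention rule (the bucket keys below are made-up placeholders; real keys come from `aws s3 ls`):

```typescript
// Zero-padded YYYY.MM.DD stamps sort lexicographically in date order,
// so a plain string comparison is enough to find expired builds.
const STAMP = /(\d{4}\.\d{2}\.\d{2})\.\d{4}\.txz$/;

function expiredBuilds(keys: string[], cutoff: string): string[] {
    return keys.filter((key) => {
        const match = key.match(STAMP);
        return match !== null && match[1] < cutoff;
    });
}

// Hypothetical listing for illustration:
const keys = [
    'unraid-api/dynamix.unraid.net-2025.09.01.1200.txz',
    'unraid-api/dynamix.unraid.net-2025.09.14.0900.txz',
];
console.log(expiredBuilds(keys, '2025.09.08')); // -> only the 2025.09.01 build
```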
.github/workflows/deploy-storybook.yml (vendored), 11 changes
@@ -22,16 +22,17 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v5

-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: '22.18.0'
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

+      - name: Setup Node.js
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'
+
       - name: Cache APT Packages
         uses: awalsh128/cache-apt-pkgs-action@v1.5.3
         with:
.github/workflows/main.yml (vendored), 149 changes
@@ -6,29 +6,15 @@ on:
     branches:
       - main

+permissions:
+  contents: write
+  pull-requests: write
+
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
+  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

 jobs:
-  release-please:
-    name: Release Please
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-      pull-requests: write
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v5
-        # Only run release-please on pushes to main
-        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
-
-      - id: release
-        uses: googleapis/release-please-action@v4
-        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
-    outputs:
-      releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
-      tag_name: ${{ steps.release.outputs.tag_name || '' }}
   test-api:
     name: Test API
     defaults:
@@ -38,36 +24,25 @@ jobs:
     steps:
       - name: Checkout repo
         uses: actions/checkout@v5
+        with:
+          fetch-depth: 0

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
-      - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
-        with:
-          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
-          version: 1.0
-
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'
+
+      - name: Cache APT Packages
+        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
+        with:
+          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
+          version: 1.0

       - name: PNPM Install
         run: pnpm install --frozen-lockfile
@@ -191,29 +166,16 @@ jobs:
       - name: Checkout repo
         uses: actions/checkout@v5

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

       - name: Cache APT Packages
         uses: awalsh128/cache-apt-pkgs-action@v1.5.3
@@ -244,7 +206,7 @@ jobs:
         id: buildnumber
         uses: onyxmueller/build-tag-number@v1
         with:
-          token: ${{secrets.github_token}}
+          token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
           prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}

       - name: Build
@@ -268,29 +230,16 @@ jobs:
       - name: Checkout repo
         uses: actions/checkout@v5

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

       - name: Cache APT Packages
         uses: awalsh128/cache-apt-pkgs-action@v1.5.3
@@ -334,29 +283,16 @@ jobs:
           echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
           echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

       - name: PNPM Install
         run: |
@@ -386,10 +322,32 @@ jobs:
           name: unraid-wc-rich
           path: web/dist

+  release-please:
+    name: Release Please
+    runs-on: ubuntu-latest
+    # Only run on pushes to main AND after tests pass
+    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+    needs:
+      - test-api
+      - build-api
+      - build-web
+      - build-unraid-ui-webcomponents
+    permissions:
+      contents: write
+      pull-requests: write
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v5
+
+      - id: release
+        uses: googleapis/release-please-action@v4
+    outputs:
+      releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
+      tag_name: ${{ steps.release.outputs.tag_name || '' }}
+
   build-plugin-staging-pr:
     name: Build and Deploy Plugin
     needs:
       - release-please
       - build-api
       - build-web
       - build-unraid-ui-webcomponents
@@ -413,9 +371,6 @@ jobs:
     needs:
       - release-please
      - build-api
      - build-web
      - build-unraid-ui-webcomponents
      - test-api
     uses: ./.github/workflows/build-plugin.yml
     with:
       RELEASE_CREATED: true
.github/workflows/push-staging-pr-on-close.yml (vendored), 100 changes
@@ -1,4 +1,9 @@
-name: Push Staging Plugin on PR Close
+name: Replace PR Plugin with Staging Redirect on Merge
+
+# This workflow runs when a PR is merged and replaces the PR-specific plugin
+# with a redirect version that points to the main staging URL.
+# This ensures users who installed the PR version will automatically
+# update to the staging version on their next update check.

 on:
   pull_request:
@@ -17,18 +22,13 @@ on:
         default: true

 jobs:
-  push-staging:
+  push-staging-redirect:
     if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || (github.event_name == 'workflow_dispatch' && inputs.pr_merged == true)
     runs-on: ubuntu-latest
     permissions:
       contents: read
       actions: read
     steps:
-      - name: Set Timezone
-        uses: szenius/set-timezone@v2.0
-        with:
-          timezoneLinux: "America/Los_Angeles"
-
       - name: Set PR number
         id: pr_number
         run: |
@@ -45,11 +45,12 @@ jobs:
           name: unraid-plugin-.*
           path: connect-files
           pr: ${{ steps.pr_number.outputs.pr_number }}
           workflow: main.yml
+          workflow_conclusion: success
           workflow_search: true
           search_artifacts: true
           if_no_artifact_found: fail

-      - name: Update Downloaded Staging Plugin to New Date
+      - name: Update Downloaded Plugin to Redirect to Staging
         run: |
@@ -60,23 +61,82 @@ jobs:
           fi

           echo "Found plugin file: $plgfile"
-          version=$(date +"%Y.%m.%d.%H%M")
-          sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1

+          # Get current version and bump it with current timestamp
+          current_version=$(grep '<!ENTITY version' "${plgfile}" | sed -E 's/.*"(.*)".*/\1/')
+          echo "Current version: ${current_version}"
+
+          # Create new version with current timestamp (ensures it's newer)
+          new_version=$(date +"%Y.%m.%d.%H%M")
+          echo "New redirect version: ${new_version}"
+
+          # Update version to trigger update
+          sed -i -E "s#(<!ENTITY version \").*(\">)#\1${new_version}\2#g" "${plgfile}" || exit 1
+
-          # Change the plugin url to point to staging
+          # Change the plugin url to point to staging - users will switch to staging on next update
           url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
           sed -i -E "s#(<!ENTITY plugin_url \").*?(\">)#\1${url}\2#g" "${plgfile}" || exit 1
           cat "${plgfile}"

+          echo "Modified plugin to redirect to: ${url}"
+          echo "Version bumped from ${current_version} to ${new_version}"
+
+          mkdir -p pr-release
+          mv "${plgfile}" pr-release/dynamix.unraid.net.plg
-      - name: Upload to Cloudflare
-        uses: jakejarvis/s3-sync-action@v0.5.1
-        env:
-          AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
-          AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
-          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
-          AWS_REGION: "auto"
-          SOURCE_DIR: pr-release
-          DEST_DIR: unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}
+      - name: Clean up old PR artifacts from Cloudflare
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: auto
+        run: |
+          # Delete all existing files in the PR directory first (txz, plg, etc.)
+          aws s3 rm s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/ \
+            --recursive \
+            --endpoint-url ${{ secrets.CF_ENDPOINT }}
+
+          echo "✅ Cleaned up old PR artifacts"
+
+      - name: Upload PR Redirect Plugin to Cloudflare
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: auto
+        run: |
+          # Upload only the redirect plugin file
+          aws s3 cp pr-release/dynamix.unraid.net.plg \
+            s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg \
+            --endpoint-url ${{ secrets.CF_ENDPOINT }} \
+            --content-encoding none \
+            --acl public-read
+
+          echo "✅ Uploaded redirect plugin"
+
+      - name: Output redirect information
+        run: |
+          echo "✅ PR plugin replaced with staging redirect version"
+          echo "PR URL remains: https://preview.dl.unraid.net/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg"
+          echo "Redirects users to staging: https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
+          echo "Users updating from this PR version will automatically switch to staging"
+
+      - name: Comment on PR about staging redirect
+        if: github.event_name == 'pull_request'
+        uses: thollander/actions-comment-pull-request@v3
+        with:
+          comment-tag: pr-closed-staging
+          mode: recreate
+          message: |
+            ## 🔄 PR Merged - Plugin Redirected to Staging
+
+            This PR has been merged and the preview plugin has been updated to redirect to the staging version.
+
+            **For users testing this PR:**
+            - Your plugin will automatically update to the staging version on the next update check
+            - The staging version includes all merged changes from this PR
+            - No manual intervention required
+
+            **Staging URL:**
+            ```
+            https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg
+            ```
+
+            Thank you for testing! 🚀
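In short, the workflow above rewrites two `<!ENTITY ...>` declarations in the downloaded `.plg` file: the version (bumped to the current timestamp, so update checks see it as newer) and the plugin URL (pointed at staging). A rough TypeScript equivalent of those two sed substitutions, with placeholder values:

```typescript
// Sketch of the same rewrite the sed commands perform, expressed as
// string replacements. The version value is a placeholder; the real
// workflow computes it with `date +"%Y.%m.%d.%H%M"`.
const stagingUrl = 'https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg';
const newVersion = '2025.09.15.1432'; // hypothetical timestamp

function redirectToStaging(plg: string): string {
    return plg
        .replace(/(<!ENTITY version ").*(">)/, `$1${newVersion}$2`)
        .replace(/(<!ENTITY plugin_url ").*(">)/, `$1${stagingUrl}$2`);
}
```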
.github/workflows/release-production.yml (vendored), 31 changes
@@ -28,9 +28,9 @@ jobs:
       with:
         latest: true
         prerelease: false
-    - uses: actions/setup-node@v4
+    - uses: actions/setup-node@v5
       with:
-        node-version: '22.18.0'
+        node-version: 22.19.0
     - run: |
         cat << 'EOF' > release-notes.txt
         ${{ steps.release-info.outputs.body }}
@@ -125,15 +125,22 @@ jobs:
           --content-encoding none \
           --acl public-read

-      - name: Actions for Discord
-        uses: Ilshidur/action-discord@0.4.0
-        env:
-          DISCORD_WEBHOOK: ${{ secrets.PUBLIC_DISCORD_RELEASE_ENDPOINT }}
+      - name: Discord Webhook Notification
+        uses: tsickert/discord-webhook@v7.0.0
         with:
-          args: |
-            🚀 **Unraid API Release ${{ inputs.version }}**
-
-            View Release: https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }}
-
-            **Changelog:**
+          webhook-url: ${{ secrets.PUBLIC_DISCORD_RELEASE_ENDPOINT }}
+          username: "Unraid API Bot"
+          avatar-url: "https://craftassets.unraid.net/uploads/logos/un-mark-gradient.png"
+          embed-title: "🚀 Unraid API ${{ inputs.version }} Released!"
+          embed-url: "https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }}"
+          embed-description: |
+            A new version of Unraid API has been released!
+
+            **Version:** `${{ inputs.version }}`
+            **Release Page:** [View on GitHub](https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }})
+
+            **📋 Changelog:**
+            ${{ steps.release-info.outputs.body }}
+          embed-color: 16734296
+          embed-footer-text: "Unraid API • Automated Release"
+          embed-timestamp: true
.github/workflows/test-libvirt.yml (vendored), 71 changes
@@ -1,71 +0,0 @@ (file deleted)
name: Test Libvirt
on:
  push:
    branches:
      - main
    paths:
      - "libvirt/**"
  pull_request:
    paths:
      - "libvirt/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  build:
    runs-on: ubuntu-latest

    defaults:
      run:
        working-directory: ./libvirt

    steps:
      - uses: actions/checkout@v5
        with:
          submodules: recursive

      - uses: actions/setup-python@v5
        with:
          python-version: "3.13.7"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: libvirt-dev
          version: 1.0

      - name: Set Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.15.0
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('libvirt/package.json') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: pnpm install
        run: pnpm install --frozen-lockfile

      - name: Build
        run: pnpm run build

      - name: test
        run: pnpm run test
@@ -1 +1 @@
-{".":"4.20.1"}
+{".":"4.22.2"}
@@ -3,4 +3,3 @@
 @import './unraid-theme.css';
 @import './theme-variants.css';
 @import './base-utilities.css';
-@import './sonner.css';
@@ -1,708 +0,0 @@ (file deleted)
/**------------------------------------------------------------------------------------------------
 * SONNER.CSS
 * This is a copy of Sonner's `style.css` as of commit a5b77c2df08d5c05aa923170176168102855533d
 *
 * This was necessary because I couldn't find a simple way to include Sonner's styles in vite's
 * css build output. They wouldn't show up even though the toaster was included, and vue-sonner
 * currently doesn't export its stylesheet (it appears to be inlined, but styles weren't applied
 * to the unraid-toaster component for some reason).
 *------------------------------------------------------------------------------------------------**/
:where(html[dir='ltr']),
:where([data-sonner-toaster][dir='ltr']) {
  --toast-icon-margin-start: -3px;
  --toast-icon-margin-end: 4px;
  --toast-svg-margin-start: -1px;
  --toast-svg-margin-end: 0px;
  --toast-button-margin-start: auto;
  --toast-button-margin-end: 0;
  --toast-close-button-start: 0;
  --toast-close-button-end: unset;
  --toast-close-button-transform: translate(-35%, -35%);
}

:where(html[dir='rtl']),
:where([data-sonner-toaster][dir='rtl']) {
  --toast-icon-margin-start: 4px;
  --toast-icon-margin-end: -3px;
  --toast-svg-margin-start: 0px;
  --toast-svg-margin-end: -1px;
  --toast-button-margin-start: 0;
  --toast-button-margin-end: auto;
  --toast-close-button-start: unset;
  --toast-close-button-end: 0;
  --toast-close-button-transform: translate(35%, -35%);
}

:where([data-sonner-toaster]) {
  position: fixed;
  width: var(--width);
  font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Helvetica Neue, Arial,
    Noto Sans, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji;
  --gray1: hsl(0, 0%, 99%);
  --gray2: hsl(0, 0%, 97.3%);
  --gray3: hsl(0, 0%, 95.1%);
  --gray4: hsl(0, 0%, 93%);
  --gray5: hsl(0, 0%, 90.9%);
  --gray6: hsl(0, 0%, 88.7%);
  --gray7: hsl(0, 0%, 85.8%);
  --gray8: hsl(0, 0%, 78%);
  --gray9: hsl(0, 0%, 56.1%);
  --gray10: hsl(0, 0%, 52.3%);
  --gray11: hsl(0, 0%, 43.5%);
  --gray12: hsl(0, 0%, 9%);
  --border-radius: 8px;
  box-sizing: border-box;
  padding: 0;
  margin: 0;
  list-style: none;
  outline: none;
  z-index: 999999999;
  transition: transform 400ms ease;
}

:where([data-sonner-toaster][data-lifted='true']) {
  transform: translateY(-10px);
}

@media (hover: none) and (pointer: coarse) {
  :where([data-sonner-toaster][data-lifted='true']) {
    transform: none;
  }
}

:where([data-sonner-toaster][data-x-position='right']) {
  right: max(var(--offset), env(safe-area-inset-right));
}

:where([data-sonner-toaster][data-x-position='left']) {
  left: max(var(--offset), env(safe-area-inset-left));
}

:where([data-sonner-toaster][data-x-position='center']) {
  left: 50%;
  transform: translateX(-50%);
}

:where([data-sonner-toaster][data-y-position='top']) {
  top: max(var(--offset), env(safe-area-inset-top));
}

:where([data-sonner-toaster][data-y-position='bottom']) {
  bottom: max(var(--offset), env(safe-area-inset-bottom));
}

:where([data-sonner-toast]) {
  --y: translateY(100%);
  --lift-amount: calc(var(--lift) * var(--gap));
  z-index: var(--z-index);
  position: absolute;
  opacity: 0;
  transform: var(--y);
  filter: blur(0);
  /* https://stackoverflow.com/questions/48124372/pointermove-event-not-working-with-touch-why-not */
  touch-action: none;
  transition: transform 400ms, opacity 400ms, height 400ms, box-shadow 200ms;
  box-sizing: border-box;
  outline: none;
  overflow-wrap: anywhere;
}

:where([data-sonner-toast][data-styled='true']) {
  padding: 16px;
  background: var(--normal-bg);
  border: 1px solid var(--normal-border);
  color: var(--normal-text);
  border-radius: var(--border-radius);
  box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1);
  width: var(--width);
  font-size: 13px;
  display: flex;
  align-items: center;
  gap: 6px;
}

:where([data-sonner-toast]:focus-visible) {
  box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
}

:where([data-sonner-toast][data-y-position='top']) {
  top: 0;
  --y: translateY(-100%);
  --lift: 1;
  --lift-amount: calc(1 * var(--gap));
}

:where([data-sonner-toast][data-y-position='bottom']) {
  bottom: 0;
  --y: translateY(100%);
  --lift: -1;
  --lift-amount: calc(var(--lift) * var(--gap));
}

:where([data-sonner-toast]) :where([data-description]) {
  font-weight: 400;
  line-height: 1.4;
  color: inherit;
}

:where([data-sonner-toast]) :where([data-title]) {
  font-weight: 500;
  line-height: 1.5;
  color: inherit;
}

:where([data-sonner-toast]) :where([data-icon]) {
  display: flex;
  height: 16px;
  width: 16px;
  position: relative;
  justify-content: flex-start;
  align-items: center;
  flex-shrink: 0;
  margin-left: var(--toast-icon-margin-start);
  margin-right: var(--toast-icon-margin-end);
}

:where([data-sonner-toast][data-promise='true']) :where([data-icon]) > svg {
  opacity: 0;
  transform: scale(0.8);
  transform-origin: center;
  animation: sonner-fade-in 300ms ease forwards;
}

:where([data-sonner-toast]) :where([data-icon]) > * {
  flex-shrink: 0;
}

:where([data-sonner-toast]) :where([data-icon]) svg {
  margin-left: var(--toast-svg-margin-start);
  margin-right: var(--toast-svg-margin-end);
}

:where([data-sonner-toast]) :where([data-content]) {
  display: flex;
  flex-direction: column;
  gap: 2px;
}

[data-sonner-toast][data-styled='true'] [data-button] {
  border-radius: 4px;
  padding-left: 8px;
  padding-right: 8px;
  height: 24px;
  font-size: 12px;
  color: var(--normal-bg);
  background: var(--normal-text);
  margin-left: var(--toast-button-margin-start);
  margin-right: var(--toast-button-margin-end);
  border: none;
  cursor: pointer;
  outline: none;
  display: flex;
  align-items: center;
  flex-shrink: 0;
  transition: opacity 400ms, box-shadow 200ms;
}

:where([data-sonner-toast]) :where([data-button]):focus-visible {
  box-shadow: 0 0 0 2px rgba(0, 0, 0, 0.4);
}

:where([data-sonner-toast]) :where([data-button]):first-of-type {
  margin-left: var(--toast-button-margin-start);
  margin-right: var(--toast-button-margin-end);
}

:where([data-sonner-toast]) :where([data-cancel]) {
  color: var(--normal-text);
  background: rgba(0, 0, 0, 0.08);
}

:where([data-sonner-toast][data-theme='dark']) :where([data-cancel]) {
  background: rgba(255, 255, 255, 0.3);
}

[data-sonner-toast] [data-close-button] {
  position: absolute;
  left: var(--toast-close-button-start);
  right: var(--toast-close-button-end);
  top: 0;
  height: 20px;
  width: 20px;
  min-width: inherit !important;
  margin: 0 !important;
  display: flex;
  justify-content: center;
  align-items: center;
  padding: 0;
  color: hsl(var(--foreground));
  border: 1px solid hsl(var(--border));
  transform: var(--toast-close-button-transform);
  border-radius: 50%;
  cursor: pointer;
  z-index: 1;
  transition: opacity 100ms, background 200ms, border-color 200ms;
}

[data-sonner-toast] [data-close-button] {
  background: hsl(var(--background));
}

:where([data-sonner-toast]) :where([data-close-button]):focus-visible {
  box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
}

:where([data-sonner-toast]) :where([data-disabled='true']) {
  cursor: not-allowed;
}

[data-sonner-toast]:hover [data-close-button]:hover {
  background: hsl(var(--muted));
  border-color: hsl(var(--border));
}

/* Leave a ghost div to avoid setting hover to false when swiping out */
:where([data-sonner-toast][data-swiping='true'])::before {
  content: '';
  position: absolute;
  left: 0;
  right: 0;
  height: 100%;
  z-index: -1;
}

:where([data-sonner-toast][data-y-position='top'][data-swiping='true'])::before {
  /* y 50% needed to distribute height additional height evenly */
  bottom: 50%;
  transform: scaleY(3) translateY(50%);
}

:where([data-sonner-toast][data-y-position='bottom'][data-swiping='true'])::before {
  /* y -50% needed to distribute height additional height evenly */
  top: 50%;
  transform: scaleY(3) translateY(-50%);
}

/* Leave a ghost div to avoid setting hover to false when transitioning out */
:where([data-sonner-toast][data-swiping='false'][data-removed='true'])::before {
  content: '';
  position: absolute;
  inset: 0;
  transform: scaleY(2);
}

/* Needed to avoid setting hover to false when inbetween toasts */
:where([data-sonner-toast])::after {
  content: '';
  position: absolute;
  left: 0;
  height: calc(var(--gap) + 1px);
  bottom: 100%;
  width: 100%;
}

:where([data-sonner-toast][data-mounted='true']) {
  --y: translateY(0);
  opacity: 1;
}

:where([data-sonner-toast][data-expanded='false'][data-front='false']) {
  --scale: var(--toasts-before) * 0.05 + 1;
  --y: translateY(calc(var(--lift-amount) * var(--toasts-before))) scale(calc(-1 * var(--scale)));
  height: var(--front-toast-height);
}

:where([data-sonner-toast]) > * {
  transition: opacity 400ms;
}

:where([data-sonner-toast][data-expanded='false'][data-front='false'][data-styled='true']) > * {
  opacity: 0;
}

:where([data-sonner-toast][data-visible='false']) {
  opacity: 0;
  pointer-events: none;
}

:where([data-sonner-toast][data-mounted='true'][data-expanded='true']) {
  --y: translateY(calc(var(--lift) * var(--offset)));
  height: var(--initial-height);
}

:where([data-sonner-toast][data-removed='true'][data-front='true'][data-swipe-out='false']) {
  --y: translateY(calc(var(--lift) * -100%));
  opacity: 0;
}

:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='true']) {
  --y: translateY(calc(var(--lift) * var(--offset) + var(--lift) * -100%));
  opacity: 0;
}

:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='false']) {
  --y: translateY(40%);
  opacity: 0;
  transition: transform 500ms, opacity 200ms;
}

/* Bump up the height to make sure hover state doesn't get set to false */
:where([data-sonner-toast][data-removed='true'][data-front='false'])::before {
  height: calc(var(--initial-height) + 20%);
}

[data-sonner-toast][data-swiping='true'] {
  transform: var(--y) translateY(var(--swipe-amount, 0px));
  transition: none;
}

[data-sonner-toast][data-swiped='true'] {
  user-select: none;
}

[data-sonner-toast][data-swipe-out='true'][data-y-position='bottom'],
[data-sonner-toast][data-swipe-out='true'][data-y-position='top'] {
  animation: swipe-out 200ms ease-out forwards;
}

@keyframes swipe-out {
  from {
    transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount)));
    opacity: 1;
  }

  to {
    transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount) + var(--lift) * -100%));
    opacity: 0;
  }
}

@media (max-width: 600px) {
  [data-sonner-toaster] {
    position: fixed;
    --mobile-offset: 16px;
    right: var(--mobile-offset);
    left: var(--mobile-offset);
    width: 100%;
  }

  [data-sonner-toaster][dir='rtl'] {
    left: calc(var(--mobile-offset) * -1);
  }

  [data-sonner-toaster] [data-sonner-toast] {
    left: 0;
    right: 0;
    width: calc(100% - var(--mobile-offset) * 2);
  }

  [data-sonner-toaster][data-x-position='left'] {
    left: var(--mobile-offset);
  }

  [data-sonner-toaster][data-y-position='bottom'] {
    bottom: 20px;
  }

  [data-sonner-toaster][data-y-position='top'] {
    top: 20px;
  }

  [data-sonner-toaster][data-x-position='center'] {
    left: var(--mobile-offset);
    right: var(--mobile-offset);
    transform: none;
  }
}

[data-sonner-toaster][data-theme='light'] {
  --normal-bg: hsl(var(--background));
  --normal-border: hsl(var(--border));
  --normal-text: hsl(var(--foreground));

  --success-bg: hsl(var(--background));
  --success-border: hsl(var(--border));
  --success-text: hsl(140, 100%, 27%);

  --info-bg: hsl(var(--background));
  --info-border: hsl(var(--border));
  --info-text: hsl(210, 92%, 45%);

  --warning-bg: hsl(var(--background));
  --warning-border: hsl(var(--border));
  --warning-text: hsl(31, 92%, 45%);

  --error-bg: hsl(var(--background));
  --error-border: hsl(var(--border));
  --error-text: hsl(360, 100%, 45%);

  /* Old colors, preserved for reference
  --success-bg: hsl(143, 85%, 96%);
  --success-border: hsl(145, 92%, 91%);
  --success-text: hsl(140, 100%, 27%);

  --info-bg: hsl(208, 100%, 97%);
  --info-border: hsl(221, 91%, 91%);
  --info-text: hsl(210, 92%, 45%);

  --warning-bg: hsl(49, 100%, 97%);
  --warning-border: hsl(49, 91%, 91%);
  --warning-text: hsl(31, 92%, 45%);

  --error-bg: hsl(359, 100%, 97%);
  --error-border: hsl(359, 100%, 94%);
  --error-text: hsl(360, 100%, 45%); */
}

[data-sonner-toaster][data-theme='light'] [data-sonner-toast][data-invert='true'] {
  --normal-bg: hsl(0 0% 3.9%);
  --normal-border: hsl(0 0% 14.9%);
  --normal-text: hsl(0 0% 98%);
}

[data-sonner-toaster][data-theme='dark'] [data-sonner-toast][data-invert='true'] {
  --normal-bg: hsl(0 0% 100%);
  --normal-border: hsl(0 0% 89.8%);
  --normal-text: hsl(0 0% 3.9%);
}

[data-sonner-toaster][data-theme='dark'] {
  --normal-bg: hsl(var(--background));
  --normal-border: hsl(var(--border));
  --normal-text: hsl(var(--foreground));

  --success-bg: hsl(var(--background));
  --success-border: hsl(var(--border));
  --success-text: hsl(150, 86%, 65%);

  --info-bg: hsl(var(--background));
  --info-border: hsl(var(--border));
  --info-text: hsl(216, 87%, 65%);

  --warning-bg: hsl(var(--background));
  --warning-border: hsl(var(--border));
  --warning-text: hsl(46, 87%, 65%);

  --error-bg: hsl(var(--background));
  --error-border: hsl(var(--border));
  --error-text: hsl(358, 100%, 81%);

  /* Old colors, preserved for reference
  --success-bg: hsl(150, 100%, 6%);
  --success-border: hsl(147, 100%, 12%);
  --success-text: hsl(150, 86%, 65%);

  --info-bg: hsl(215, 100%, 6%);
  --info-border: hsl(223, 100%, 12%);
  --info-text: hsl(216, 87%, 65%);

  --warning-bg: hsl(64, 100%, 6%);
  --warning-border: hsl(60, 100%, 12%);
  --warning-text: hsl(46, 87%, 65%);

  --error-bg: hsl(358, 76%, 10%);
  --error-border: hsl(357, 89%, 16%);
  --error-text: hsl(358, 100%, 81%); */
}

[data-rich-colors='true'][data-sonner-toast][data-type='success'] {
  background: var(--success-bg);
  border-color: var(--success-border);
  color: var(--success-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='success'] [data-close-button] {
  background: var(--success-bg);
  border-color: var(--success-border);
  color: var(--success-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='info'] {
  background: var(--info-bg);
  border-color: var(--info-border);
  color: var(--info-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='info'] [data-close-button] {
  background: var(--info-bg);
  border-color: var(--info-border);
  color: var(--info-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='warning'] {
  background: var(--warning-bg);
  border-color: var(--warning-border);
  color: var(--warning-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='warning'] [data-close-button] {
  background: var(--warning-bg);
  border-color: var(--warning-border);
  color: var(--warning-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='error'] {
  background: var(--error-bg);
  border-color: var(--error-border);
  color: var(--error-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='error'] [data-close-button] {
  background: var(--error-bg);
  border-color: var(--error-border);
  color: var(--error-text);
}

.sonner-loading-wrapper {
  --size: 16px;
  height: var(--size);
  width: var(--size);
  position: absolute;
  inset: 0;
  z-index: 10;
}

.sonner-loading-wrapper[data-visible='false'] {
  transform-origin: center;
  animation: sonner-fade-out 0.2s ease forwards;
}

.sonner-spinner {
  position: relative;
  top: 50%;
  left: 50%;
  height: var(--size);
  width: var(--size);
}

.sonner-loading-bar {
  animation: sonner-spin 1.2s linear infinite;
  background: hsl(var(--muted-foreground));
  border-radius: 6px;
  height: 8%;
  left: -10%;
  position: absolute;
  top: -3.9%;
  width: 24%;
}

.sonner-loading-bar:nth-child(1) {
  animation-delay: -1.2s;
  transform: rotate(0.0001deg) translate(146%);
}

.sonner-loading-bar:nth-child(2) {
  animation-delay: -1.1s;
  transform: rotate(30deg) translate(146%);
}

.sonner-loading-bar:nth-child(3) {
  animation-delay: -1s;
  transform: rotate(60deg) translate(146%);
}

.sonner-loading-bar:nth-child(4) {
  animation-delay: -0.9s;
  transform: rotate(90deg) translate(146%);
}

.sonner-loading-bar:nth-child(5) {
  animation-delay: -0.8s;
  transform: rotate(120deg) translate(146%);
}

.sonner-loading-bar:nth-child(6) {
  animation-delay: -0.7s;
  transform: rotate(150deg) translate(146%);
}

.sonner-loading-bar:nth-child(7) {
  animation-delay: -0.6s;
  transform: rotate(180deg) translate(146%);
}

.sonner-loading-bar:nth-child(8) {
  animation-delay: -0.5s;
  transform: rotate(210deg) translate(146%);
}

.sonner-loading-bar:nth-child(9) {
  animation-delay: -0.4s;
  transform: rotate(240deg) translate(146%);
}

.sonner-loading-bar:nth-child(10) {
  animation-delay: -0.3s;
  transform: rotate(270deg) translate(146%);
}

.sonner-loading-bar:nth-child(11) {
  animation-delay: -0.2s;
  transform: rotate(300deg) translate(146%);
}

.sonner-loading-bar:nth-child(12) {
  animation-delay: -0.1s;
  transform: rotate(330deg) translate(146%);
}

@keyframes sonner-fade-in {
  0% {
    opacity: 0;
    transform: scale(0.8);
  }
  100% {
    opacity: 1;
    transform: scale(1);
  }
}

@keyframes sonner-fade-out {
  0% {
    opacity: 1;
    transform: scale(1);
  }
  100% {
    opacity: 0;
    transform: scale(0.8);
  }
}

@keyframes sonner-spin {
  0% {
    opacity: 1;
  }
  100% {
    opacity: 0.15;
  }
}

@media (prefers-reduced-motion) {
  [data-sonner-toast],
  [data-sonner-toast] > *,
  .sonner-loading-bar {
    transition: none !important;
    animation: none !important;
  }
}

.sonner-loader {
  position: absolute;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
  transform-origin: center;
  transition: opacity 200ms, transform 200ms;
}

.sonner-loader[data-visible='false'] {
  opacity: 0;
  transform: scale(0.8) translate(-50%, -50%);
}

/* Override Unraid webgui docker icon styles on sonner containers */
[data-sonner-toast] [data-icon]:before,
[data-sonner-toast] .fa-docker:before {
  font-family: inherit !important;
  content: '' !important;
}
@@ -12,7 +12,6 @@
   --header-background-color: #1c1b1b;
   --header-gradient-start: rgba(28, 27, 27, 0);
   --header-gradient-end: rgba(28, 27, 27, 0.7);
-  --ui-border-muted: hsl(240 5% 20%);
   --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #1c1b1b;
@@ -28,7 +27,6 @@
   --header-background-color: #f2f2f2;
   --header-gradient-start: rgba(242, 242, 242, 0);
   --header-gradient-end: rgba(242, 242, 242, 0.7);
-  --ui-border-muted: hsl(240 5.9% 90%);
   --color-border: #e0e0e0;
   --color-alpha: #ff8c2f;
   --color-beta: #f2f2f2;
@@ -43,7 +41,6 @@
   --header-background-color: #1c1b1b;
   --header-gradient-start: rgba(28, 27, 27, 0);
   --header-gradient-end: rgba(28, 27, 27, 0.7);
-  --ui-border-muted: hsl(240 5% 25%);
   --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #383735;
@@ -58,7 +55,6 @@
   --header-background-color: #f2f2f2;
   --header-gradient-start: rgba(242, 242, 242, 0);
   --header-gradient-end: rgba(242, 242, 242, 0.7);
-  --ui-border-muted: hsl(210 40% 80%);
   --color-border: #5a8bb8;
   --color-alpha: #ff8c2f;
   --color-beta: #e7f2f8;
@@ -68,25 +64,25 @@

 /* Dark Mode Overrides */
 .dark {
   --ui-border-muted: hsl(240 5% 20%);
   --color-border: #383735;
 }

 /*
  * Dynamic color variables for user overrides from GraphQL
  * These are set via JavaScript and override the theme defaults
+ * Using :root with class for higher specificity to override theme classes
  */
-.has-custom-header-text {
+:root.has-custom-header-text {
   --header-text-primary: var(--custom-header-text-primary);
+  --color-header-text-primary: var(--custom-header-text-primary);
 }

-.has-custom-header-meta {
+:root.has-custom-header-meta {
   --header-text-secondary: var(--custom-header-text-secondary);
+  --color-header-text-secondary: var(--custom-header-text-secondary);
 }

-.has-custom-header-bg {
+:root.has-custom-header-bg {
   --header-background-color: var(--custom-header-background-color);
+  --color-header-background: var(--custom-header-background-color);
   --header-gradient-start: var(--custom-header-gradient-start);
@@ -7,7 +7,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
 This is the Unraid API monorepo containing multiple packages that provide API functionality for Unraid servers. It uses pnpm workspaces with the following structure:

 - `/api` - Core NestJS API server with GraphQL
-- `/web` - Nuxt.js frontend application
+- `/web` - Vue 3 frontend application
 - `/unraid-ui` - Vue 3 component library
 - `/plugin` - Unraid plugin package (.plg)
 - `/packages` - Shared packages and API plugins
@@ -128,9 +128,6 @@ Enables GraphQL playground at `http://tower.local/graphql`
 - **Use Mocks Correctly**: Mocks should be used as nouns, not verbs.

 #### Vue Component Testing

-- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
-- Nuxt is currently set to auto import so some vue files may need compute or ref imported
-- Use pnpm when running terminal commands and stay within the web directory
 - Tests are located under `web/__test__`, run with `pnpm test`
 - Use `mount` from Vue Test Utils for component testing
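A minimal sketch of the testing setup those guidelines describe, using vitest and Vue Test Utils (the component here is an inline stand-in; real tests under `web/__test__` would import `.vue` files):

```typescript
import { describe, expect, it } from 'vitest';
import { mount } from '@vue/test-utils';
import { defineComponent, h, ref } from 'vue';

// Inline stand-in component; note `ref` is imported explicitly since
// these tests run outside any framework auto-import magic.
const Counter = defineComponent({
    setup() {
        const count = ref(0);
        return () => h('button', { onClick: () => count.value++ }, String(count.value));
    },
});

describe('Counter', () => {
    it('increments on click', async () => {
        const wrapper = mount(Counter);
        await wrapper.trigger('click');
        expect(wrapper.text()).toBe('1');
    });
});
```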
@@ -31,3 +31,4 @@ BYPASS_CORS_CHECKS=true
 CHOKIDAR_USEPOLLING=true
 LOG_TRANSPORT=console
 LOG_LEVEL=trace
+ENABLE_NEXT_DOCKER_RELEASE=true
api/.gitignore (vendored), 3 changes
@@ -93,3 +93,6 @@ dev/local-session

 # local OIDC config for testing - contains secrets
 dev/configs/oidc.local.json
+
+# local api keys
+dev/keys/*
@@ -1,5 +1,75 @@
# Changelog

## [4.22.2](https://github.com/unraid/api/compare/v4.22.1...v4.22.2) (2025-09-15)

### Bug Fixes

* **deps:** pin dependency conventional-changelog-conventionalcommits to 9.1.0 ([#1697](https://github.com/unraid/api/issues/1697)) ([9a86c61](https://github.com/unraid/api/commit/9a86c615da2e975f568922fa012cc29b3f9cde0e))
* **deps:** update dependency filenamify to v7 ([#1703](https://github.com/unraid/api/issues/1703)) ([b80988a](https://github.com/unraid/api/commit/b80988aaabebc4b8dbf2bf31f0764bf2f28e1575))
* **deps:** update graphqlcodegenerator monorepo (major) ([#1689](https://github.com/unraid/api/issues/1689)) ([ba4a43a](https://github.com/unraid/api/commit/ba4a43aec863fc30c47dd17370d74daed7f84703))
* false positive on verify_install script being external shell ([#1704](https://github.com/unraid/api/issues/1704)) ([31a255c](https://github.com/unraid/api/commit/31a255c9281b29df983d0f5d0475cd5a69790a48))
* improve vue mount speed by 10x ([c855caa](https://github.com/unraid/api/commit/c855caa9b2d4d63bead1a992f5c583e00b9ba843))

## [4.22.1](https://github.com/unraid/api/compare/v4.22.0...v4.22.1) (2025-09-12)

### Bug Fixes

* set input color in SSO field rather than inside of the main.css ([01d353f](https://github.com/unraid/api/commit/01d353fa08a3df688b37a495a204605138f7f71d))

## [4.22.0](https://github.com/unraid/api/compare/v4.21.0...v4.22.0) (2025-09-12)

### Features

* improved update ui ([#1691](https://github.com/unraid/api/issues/1691)) ([a59b363](https://github.com/unraid/api/commit/a59b363ebc1e660f854c55d50fc02c823c2fd0cc))

### Bug Fixes

* **deps:** update dependency camelcase-keys to v10 ([#1687](https://github.com/unraid/api/issues/1687)) ([95faeaa](https://github.com/unraid/api/commit/95faeaa2f39bf7bd16502698d7530aaa590b286d))
* **deps:** update dependency p-retry to v7 ([#1608](https://github.com/unraid/api/issues/1608)) ([c782cf0](https://github.com/unraid/api/commit/c782cf0e8710c6690050376feefda3edb30dd549))
* **deps:** update dependency uuid to v13 ([#1688](https://github.com/unraid/api/issues/1688)) ([2fef10c](https://github.com/unraid/api/commit/2fef10c94aae910e95d9f5bcacf7289e2cca6ed9))
* **deps:** update dependency vue-sonner to v2 ([#1475](https://github.com/unraid/api/issues/1475)) ([f95ca9c](https://github.com/unraid/api/commit/f95ca9c9cb69725dcf3bb4bcbd0b558a2074e311))
* display settings fix for languages on less than 7.2-beta.2.3 ([#1696](https://github.com/unraid/api/issues/1696)) ([03dae7c](https://github.com/unraid/api/commit/03dae7ce66b3409593eeee90cd5b56e2a920ca44))
* hide reset help option when sso is being checked ([#1695](https://github.com/unraid/api/issues/1695)) ([222ced7](https://github.com/unraid/api/commit/222ced7518d40c207198a3b8548f0e024bc865b0))
* progressFrame white on black ([0990b89](https://github.com/unraid/api/commit/0990b898bd02c231153157c20d5142e5fd4513cd))

## [4.21.0](https://github.com/unraid/api/compare/v4.20.4...v4.21.0) (2025-09-10)

### Features

* add zsh shell detection to install script ([#1539](https://github.com/unraid/api/issues/1539)) ([50ea2a3](https://github.com/unraid/api/commit/50ea2a3ffb82b30152fb85e0fb9b0d178d596efe))
* **api:** determine if docker container has update ([#1582](https://github.com/unraid/api/issues/1582)) ([e57d81e](https://github.com/unraid/api/commit/e57d81e0735772758bb85e0b3c89dce15c56635e))

### Bug Fixes

* white on white login text ([ae4d3ec](https://github.com/unraid/api/commit/ae4d3ecbc417454ae3c6e02018f8e4c49bbfc902))

## [4.20.4](https://github.com/unraid/api/compare/v4.20.3...v4.20.4) (2025-09-09)

### Bug Fixes

* staging PR plugin fixes + UI issues on 7.2 beta ([b79b44e](https://github.com/unraid/api/commit/b79b44e95c65a124313814ab55b0d0a745a799c7))

## [4.20.3](https://github.com/unraid/api/compare/v4.20.2...v4.20.3) (2025-09-09)

### Bug Fixes

* header background color issues fixed on 7.2 - thanks Nick! ([73c1100](https://github.com/unraid/api/commit/73c1100d0ba396fe4342f8ce7561017ab821e68b))

## [4.20.2](https://github.com/unraid/api/compare/v4.20.1...v4.20.2) (2025-09-09)

### Bug Fixes

* trigger deployment ([a27453f](https://github.com/unraid/api/commit/a27453fda81e4eeb07f257e60516bebbbc27cf7a))

## [4.20.1](https://github.com/unraid/api/compare/v4.20.0...v4.20.1) (2025-09-09)
@@ -17,6 +17,7 @@ const config: CodegenConfig = {
         URL: 'URL',
         Port: 'number',
         UUID: 'string',
+        BigInt: 'number',
       },
       scalarSchemas: {
         URL: 'z.instanceof(URL)',
@@ -24,6 +25,7 @@ const config: CodegenConfig = {
         JSON: 'z.record(z.string(), z.any())',
         Port: 'z.number()',
         UUID: 'z.string()',
+        BigInt: 'z.number()',
       },
     },
     generates: {
@@ -1,5 +1,5 @@
 {
-  "version": "4.19.1",
+  "version": "4.22.2",
   "extraOrigins": [],
   "sandbox": true,
   "ssoSubIds": [],
api/docs/developer/feature-flags.md (new file), 247 lines
@@ -0,0 +1,247 @@
# Feature Flags

Feature flags allow you to conditionally enable or disable functionality in the Unraid API. This is useful for gradually rolling out new features, A/B testing, or keeping experimental code behind flags during development.

## Setting Up Feature Flags

### 1. Define the Feature Flag

Feature flags are defined as environment variables and collected in `src/consts.ts`:

```typescript
// src/environment.ts
export const ENABLE_MY_NEW_FEATURE = process.env.ENABLE_MY_NEW_FEATURE === 'true';

// src/consts.ts
export const FeatureFlags = Object.freeze({
    ENABLE_NEXT_DOCKER_RELEASE,
    ENABLE_MY_NEW_FEATURE, // Add your new flag here
});
```

### 2. Set the Environment Variable

Set the environment variable when running the API:

```bash
ENABLE_MY_NEW_FEATURE=true unraid-api start
```

Or add it to your `.env` file:

```env
ENABLE_MY_NEW_FEATURE=true
```

## Using Feature Flags in GraphQL
|
||||
|
||||
### Method 1: @UseFeatureFlag Decorator (Schema-Level)
|
||||
|
||||
The `@UseFeatureFlag` decorator conditionally includes or excludes GraphQL fields, queries, and mutations from the schema based on feature flags. When a feature flag is disabled, the field won't appear in the GraphQL schema at all.
|
||||
|
||||
```typescript
|
||||
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
|
||||
import { Query, Mutation, ResolveField } from '@nestjs/graphql';
|
||||
|
||||
@Resolver()
|
||||
export class MyResolver {
|
||||
|
||||
// Conditionally include a query
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@Query(() => String)
|
||||
async experimentalQuery() {
|
||||
return 'This query only exists when ENABLE_MY_NEW_FEATURE is true';
|
||||
}
|
||||
|
||||
// Conditionally include a mutation
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@Mutation(() => Boolean)
|
||||
async experimentalMutation() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Conditionally include a field resolver
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@ResolveField(() => String)
|
||||
async experimentalField() {
|
||||
return 'This field only exists when the flag is enabled';
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Clean schema - disabled features don't appear in GraphQL introspection
|
||||
- No runtime overhead for disabled features
|
||||
- Clear feature boundaries
|
||||
|
||||
**Use when:**
|
||||
- You want to completely hide features from the GraphQL schema
|
||||
- The feature is experimental or in beta
|
||||
- You're doing a gradual rollout
|
||||
|
||||
### Method 2: checkFeatureFlag Function (Runtime)

The `checkFeatureFlag` function provides runtime feature flag checking within resolver methods. It throws a `ForbiddenException` if the feature is disabled. (A sketch of what such a helper might look like appears at the end of this section.)

```typescript
import { checkFeatureFlag } from '@app/unraid-api/utils/feature-flag.helper.js';
import { FeatureFlags } from '@app/consts.js';
import { Args, Query, ResolveField, Resolver } from '@nestjs/graphql';

@Resolver()
export class MyResolver {
    @Query(() => String)
    async myQuery(
        @Args('useNewAlgorithm', { nullable: true }) useNewAlgorithm?: boolean
    ) {
        // Conditionally use new logic based on feature flag
        if (useNewAlgorithm) {
            checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
            return this.newAlgorithm();
        }

        return this.oldAlgorithm();
    }

    @ResolveField(() => String)
    async dataField() {
        // Check flag at the start of the method
        checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');

        // Feature-specific logic here
        return this.computeExperimentalData();
    }
}
```

**Benefits:**

- More granular control within methods
- Can conditionally execute parts of a method
- Useful for A/B testing scenarios
- Good for gradual migration strategies

**Use when:**

- You need conditional logic within a method
- The field should exist but behavior changes based on the flag
- You're migrating from old to new implementation gradually
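The helper itself is not reproduced in this document. As a rough sketch, assuming only the behavior described above (throws `ForbiddenException` when the named flag is falsy) and not the shipped implementation in `feature-flag.helper.ts`:

```typescript
import { ForbiddenException } from '@nestjs/common';

// Sketch only (assumed): looks the flag up on the frozen FeatureFlags
// object and throws when it is falsy.
export function checkFeatureFlag<T extends Record<string, boolean>>(
    flags: T,
    flagName: keyof T & string
): void {
    if (!flags[flagName]) {
        throw new ForbiddenException(`Feature flag ${flagName} is not enabled`);
    }
}
```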
## Feature Flag Patterns

### Pattern 1: Complete Feature Toggle

Hide an entire feature behind a flag:

```typescript
@UseFeatureFlag('ENABLE_DOCKER_TEMPLATES')
@Resolver(() => DockerTemplate)
export class DockerTemplateResolver {
    // All resolvers in this class are toggled by the flag
}
```

### Pattern 2: Gradual Migration

Migrate from old to new implementation:

```typescript
@Query(() => [Container])
async getContainers(@Args('version') version?: string) {
    if (version === 'v2') {
        checkFeatureFlag(FeatureFlags, 'ENABLE_CONTAINERS_V2');
        return this.getContainersV2();
    }

    return this.getContainersV1();
}
```

### Pattern 3: Beta Features

Mark features as beta:

```typescript
@UseFeatureFlag('ENABLE_BETA_FEATURES')
@ResolveField(() => BetaMetrics, {
    description: 'BETA: Advanced metrics (requires ENABLE_BETA_FEATURES flag)'
})
async betaMetrics() {
    return this.computeBetaMetrics();
}
```

### Pattern 4: Performance Optimizations

Toggle expensive operations:

```typescript
@ResolveField(() => Statistics)
async statistics() {
    const basicStats = await this.getBasicStats();

    try {
        checkFeatureFlag(FeatureFlags, 'ENABLE_ADVANCED_ANALYTICS');
        const advancedStats = await this.getAdvancedStats();
        return { ...basicStats, ...advancedStats };
    } catch {
        // Feature disabled, return only basic stats
        return basicStats;
    }
}
```
## Testing with Feature Flags

When writing tests for feature-flagged code, create a mock to control feature flag values:

```typescript
import { describe, it, vi } from 'vitest';

// Mock the entire consts module
vi.mock('@app/consts.js', async () => {
    const actual = await vi.importActual('@app/consts.js');
    return {
        ...actual,
        FeatureFlags: {
            ENABLE_MY_NEW_FEATURE: true, // Set your test value
            ENABLE_NEXT_DOCKER_RELEASE: false,
        }
    };
});

describe('MyResolver', () => {
    it('should execute new logic when feature is enabled', async () => {
        // Test new behavior with mocked flag
    });
});
```
## Best Practices

1. **Naming Convention**: Use `ENABLE_` prefix for boolean feature flags
2. **Environment Variables**: Always use uppercase with underscores
3. **Documentation**: Document what each feature flag controls
4. **Cleanup**: Remove feature flags once features are stable and fully rolled out
5. **Default State**: New features should default to `false` (disabled)
6. **Granularity**: Keep feature flags focused on a single feature or capability
7. **Testing**: Always test both enabled and disabled states (see the sketch after this list)
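For practice 7, a module-level `vi.mock` pins a single flag state for the whole spec file. One way to exercise both states in the same file (a sketch, not taken from this codebase) is to reset the module registry and re-import under a fresh mock per test:

```typescript
import { afterEach, describe, expect, it, vi } from 'vitest';

// Sketch: re-import the consts module under a fresh mock per flag state.
afterEach(() => {
    vi.resetModules();
    vi.doUnmock('@app/consts.js');
});

async function loadWithFlag(enabled: boolean) {
    vi.doMock('@app/consts.js', () => ({
        FeatureFlags: Object.freeze({ ENABLE_MY_NEW_FEATURE: enabled }),
    }));
    return import('@app/consts.js');
}

describe('FeatureFlags', () => {
    it('exposes the enabled state', async () => {
        const { FeatureFlags } = await loadWithFlag(true);
        expect(FeatureFlags.ENABLE_MY_NEW_FEATURE).toBe(true);
    });

    it('exposes the disabled state', async () => {
        const { FeatureFlags } = await loadWithFlag(false);
        expect(FeatureFlags.ENABLE_MY_NEW_FEATURE).toBe(false);
    });
});
```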
## Common Use Cases

- **Experimental Features**: Hide unstable features in production
- **Gradual Rollouts**: Enable features for specific environments first
- **A/B Testing**: Toggle between different implementations
- **Performance**: Disable expensive operations when not needed
- **Breaking Changes**: Provide a migration path with both old and new behavior
- **Debug Features**: Enable additional logging or debugging tools

## Checking Active Feature Flags

To see which feature flags are currently active:

```typescript
// Log all feature flags on startup
console.log('Active Feature Flags:', FeatureFlags);
```

Or check via GraphQL introspection to see which fields are available based on current flags; a quick script for this follows below.
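A sketch of that introspection check against a running instance (endpoint URL assumed; adjust for your setup):

```typescript
// Sketch: fields behind disabled flags are absent from the schema, so
// listing the Query type's fields via introspection is a sufficient check.
async function listQueryFields(endpoint = 'http://localhost:3001/graphql'): Promise<string[]> {
    const res = await fetch(endpoint, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ query: '{ __schema { queryType { fields { name } } } }' }),
    });
    const { data } = await res.json();
    return data.__schema.queryType.fields.map((f: { name: string }) => f.name);
}
```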
@@ -139,6 +139,9 @@ type ArrayDisk implements Node {
   """ata | nvme | usb | (others)"""
   transport: String
   color: ArrayDiskFsColor
+
+  """Whether the disk is currently spinning"""
+  isSpinning: Boolean
 }

 interface Node {
@@ -346,6 +349,9 @@ type Disk implements Node {

   """The partitions on the disk"""
   partitions: [DiskPartition!]!
+
+  """Whether the disk is spinning or not"""
+  isSpinning: Boolean!
 }

 """The type of interface the disk uses to connect to the system"""
@@ -1044,6 +1050,19 @@ enum ThemeName {
   white
 }

+type ExplicitStatusItem {
+  name: String!
+  updateStatus: UpdateStatus!
+}
+
+"""Update status of a container."""
+enum UpdateStatus {
+  UP_TO_DATE
+  UPDATE_AVAILABLE
+  REBUILD_READY
+  UNKNOWN
+}
+
 type ContainerPort {
   ip: String
   privatePort: Port
@@ -1074,8 +1093,8 @@ type DockerContainer implements Node {
   created: Int!
   ports: [ContainerPort!]!

-  """Total size of all the files in the container"""
-  sizeRootFs: Int
+  """Total size of all files in the container (in bytes)"""
+  sizeRootFs: BigInt
   labels: JSON
   state: ContainerState!
   status: String!
@@ -1083,6 +1102,8 @@ type DockerContainer implements Node {
   networkSettings: JSON
   mounts: [JSON!]
   autoStart: Boolean!
+  isUpdateAvailable: Boolean
+  isRebuildReady: Boolean
 }

 enum ContainerState {
@@ -1113,6 +1134,7 @@ type Docker implements Node {
   containers(skipCache: Boolean! = false): [DockerContainer!]!
   networks(skipCache: Boolean! = false): [DockerNetwork!]!
   organizer: ResolvedOrganizerV1!
+  containerUpdateStatuses: [ExplicitStatusItem!]!
 }

 type ResolvedOrganizerView {
@@ -2413,6 +2435,7 @@ type Mutation {
   setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
   deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
   moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
+  refreshDockerDigests: Boolean!

   """Initiates a flash drive backup using a configured remote."""
   initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
@@ -1,6 +1,6 @@
 {
     "name": "@unraid/api",
-    "version": "4.20.1",
+    "version": "4.22.2",
     "main": "src/cli/index.ts",
     "type": "module",
     "corepack": {
@@ -56,7 +56,7 @@
     "@as-integrations/fastify": "2.1.1",
     "@fastify/cookie": "11.0.2",
     "@fastify/helmet": "13.0.1",
-    "@graphql-codegen/client-preset": "4.8.3",
+    "@graphql-codegen/client-preset": "5.0.0",
     "@graphql-tools/load-files": "7.0.1",
     "@graphql-tools/merge": "9.1.1",
     "@graphql-tools/schema": "10.0.25",
@@ -84,7 +84,7 @@
     "bytes": "3.1.2",
     "cache-manager": "7.2.0",
     "cacheable-lookup": "7.0.0",
-    "camelcase-keys": "9.1.3",
+    "camelcase-keys": "10.0.0",
     "casbin": "5.38.0",
     "change-case": "5.4.4",
     "chokidar": "4.0.3",
@@ -94,7 +94,7 @@
     "command-exists": "1.2.9",
     "convert": "5.12.0",
     "cookie": "1.0.2",
-    "cron": "4.3.3",
+    "cron": "4.3.0",
     "cross-fetch": "4.1.0",
     "diff": "8.0.2",
     "dockerode": "4.0.7",
@@ -103,7 +103,7 @@
     "execa": "9.6.0",
     "exit-hook": "4.0.0",
     "fastify": "5.5.0",
-    "filenamify": "6.0.0",
+    "filenamify": "7.0.0",
     "fs-extra": "11.3.1",
     "glob": "11.0.3",
     "global-agent": "3.0.0",
@@ -127,7 +127,7 @@
     "node-cache": "5.1.2",
     "node-window-polyfill": "1.0.4",
     "openid-client": "6.6.4",
-    "p-retry": "6.2.1",
+    "p-retry": "7.0.0",
     "passport-custom": "1.1.1",
     "passport-http-header-strategy": "1.1.0",
     "path-type": "6.0.0",
@@ -141,7 +141,7 @@
     "strftime": "0.10.3",
     "systeminformation": "5.27.8",
     "undici": "7.15.0",
-    "uuid": "11.1.0",
+    "uuid": "13.0.0",
     "ws": "8.18.3",
     "zen-observable-ts": "1.1.0",
     "zod": "3.25.76"
@@ -156,14 +156,14 @@
     },
     "devDependencies": {
        "@eslint/js": "9.34.0",
-       "@graphql-codegen/add": "5.0.3",
-       "@graphql-codegen/cli": "5.0.7",
-       "@graphql-codegen/fragment-matcher": "5.1.0",
+       "@graphql-codegen/add": "6.0.0",
+       "@graphql-codegen/cli": "6.0.0",
+       "@graphql-codegen/fragment-matcher": "6.0.0",
        "@graphql-codegen/import-types-preset": "3.0.1",
-       "@graphql-codegen/typed-document-node": "5.1.2",
-       "@graphql-codegen/typescript": "4.1.6",
-       "@graphql-codegen/typescript-operations": "4.6.1",
-       "@graphql-codegen/typescript-resolvers": "4.5.1",
+       "@graphql-codegen/typed-document-node": "6.0.0",
+       "@graphql-codegen/typescript": "5.0.0",
+       "@graphql-codegen/typescript-operations": "5.0.0",
+       "@graphql-codegen/typescript-resolvers": "5.0.0",
        "@graphql-typed-document-node/core": "3.2.0",
        "@ianvs/prettier-plugin-sort-imports": "4.6.3",
        "@nestjs/testing": "11.1.6",
@@ -205,7 +205,7 @@
     "rollup-plugin-node-externals": "8.1.0",
     "supertest": "7.1.4",
     "tsx": "4.20.5",
-    "type-fest": "4.41.0",
+    "type-fest": "5.0.0",
     "typescript": "5.9.2",
     "typescript-eslint": "8.41.0",
     "unplugin-swc": "1.5.7",
@@ -12,7 +12,22 @@ import {
     UpdateRCloneRemoteDto,
 } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';

-vi.mock('got');
+vi.mock('got', () => {
+    const mockPost = vi.fn();
+    const gotMock = {
+        post: mockPost,
+    };
+    return {
+        default: gotMock,
+        HTTPError: class HTTPError extends Error {
+            response?: any;
+            constructor(response?: any) {
+                super('HTTP Error');
+                this.response = response;
+            }
+        },
+    };
+});
 vi.mock('execa');
 vi.mock('p-retry');
 vi.mock('node:fs', () => ({
@@ -60,7 +75,7 @@ vi.mock('@nestjs/common', async (importOriginal) => {

 describe('RCloneApiService', () => {
     let service: RCloneApiService;
-    let mockGot: any;
+    let mockGotPost: any;
     let mockExeca: any;
     let mockPRetry: any;
     let mockExistsSync: any;
@@ -68,19 +83,19 @@ describe('RCloneApiService', () => {
     beforeEach(async () => {
         vi.clearAllMocks();

-        const { default: got } = await import('got');
+        const got = await import('got');
         const { execa } = await import('execa');
         const pRetry = await import('p-retry');
         const { existsSync } = await import('node:fs');
         const { fileExists } = await import('@app/core/utils/files/file-exists.js');

-        mockGot = vi.mocked(got);
+        mockGotPost = vi.mocked(got.default.post);
         mockExeca = vi.mocked(execa);
         mockPRetry = vi.mocked(pRetry.default);
         mockExistsSync = vi.mocked(existsSync);

         // Mock successful RClone API response for socket check
-        mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
+        mockGotPost.mockResolvedValue({ body: { pid: 12345 } });

         // Mock RClone binary exists check
         vi.mocked(fileExists).mockResolvedValue(true);
@@ -97,10 +112,10 @@ describe('RCloneApiService', () => {
         mockPRetry.mockResolvedValue(undefined);

         service = new RCloneApiService();
-        await service.onModuleInit();
+        await service.onApplicationBootstrap();

         // Reset the mock after initialization to prepare for test-specific responses
-        mockGot.post.mockClear();
+        mockGotPost.mockClear();
     });

     describe('getProviders', () => {
@@ -109,15 +124,15 @@ describe('RCloneApiService', () => {
                 { name: 'aws', prefix: 's3', description: 'Amazon S3' },
                 { name: 'google', prefix: 'drive', description: 'Google Drive' },
             ];
-            mockGot.post.mockResolvedValue({
+            mockGotPost.mockResolvedValue({
                 body: { providers: mockProviders },
             });

             const result = await service.getProviders();

             expect(result).toEqual(mockProviders);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/config/providers',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/config\/providers$/),
                 expect.objectContaining({
                     json: {},
                     responseType: 'json',
@@ -130,7 +145,7 @@ describe('RCloneApiService', () => {
         });

         it('should return empty array when no providers', async () => {
-            mockGot.post.mockResolvedValue({ body: {} });
+            mockGotPost.mockResolvedValue({ body: {} });

             const result = await service.getProviders();

@@ -141,15 +156,15 @@ describe('RCloneApiService', () => {
     describe('listRemotes', () => {
         it('should return list of remotes', async () => {
             const mockRemotes = ['backup-s3', 'drive-storage'];
-            mockGot.post.mockResolvedValue({
+            mockGotPost.mockResolvedValue({
                 body: { remotes: mockRemotes },
             });

             const result = await service.listRemotes();

             expect(result).toEqual(mockRemotes);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/config/listremotes',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/config\/listremotes$/),
                 expect.objectContaining({
                     json: {},
                     responseType: 'json',
@@ -162,7 +177,7 @@ describe('RCloneApiService', () => {
         });

         it('should return empty array when no remotes', async () => {
-            mockGot.post.mockResolvedValue({ body: {} });
+            mockGotPost.mockResolvedValue({ body: {} });

             const result = await service.listRemotes();

@@ -174,13 +189,13 @@ describe('RCloneApiService', () => {
         it('should return remote details', async () => {
             const input: GetRCloneRemoteDetailsDto = { name: 'test-remote' };
             const mockConfig = { type: 's3', provider: 'AWS' };
-            mockGot.post.mockResolvedValue({ body: mockConfig });
+            mockGotPost.mockResolvedValue({ body: mockConfig });

             const result = await service.getRemoteDetails(input);

             expect(result).toEqual(mockConfig);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/config/get',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/config\/get$/),
                 expect.objectContaining({
                     json: { name: 'test-remote' },
                     responseType: 'json',
@@ -197,7 +212,7 @@ describe('RCloneApiService', () => {
         it('should return remote configuration', async () => {
             const input: GetRCloneRemoteConfigDto = { name: 'test-remote' };
             const mockConfig = { type: 's3', access_key_id: 'AKIA...' };
-            mockGot.post.mockResolvedValue({ body: mockConfig });
+            mockGotPost.mockResolvedValue({ body: mockConfig });

             const result = await service.getRemoteConfig(input);

@@ -213,13 +228,13 @@ describe('RCloneApiService', () => {
                 parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
             };
             const mockResponse = { success: true };
-            mockGot.post.mockResolvedValue({ body: mockResponse });
+            mockGotPost.mockResolvedValue({ body: mockResponse });

             const result = await service.createRemote(input);

             expect(result).toEqual(mockResponse);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/config/create',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/config\/create$/),
                 expect.objectContaining({
                     json: {
                         name: 'new-remote',
@@ -243,13 +258,13 @@ describe('RCloneApiService', () => {
                 parameters: { access_key_id: 'NEW_AKIA...' },
             };
             const mockResponse = { success: true };
-            mockGot.post.mockResolvedValue({ body: mockResponse });
+            mockGotPost.mockResolvedValue({ body: mockResponse });

             const result = await service.updateRemote(input);

             expect(result).toEqual(mockResponse);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/config/update',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/config\/update$/),
                 expect.objectContaining({
                     json: {
                         name: 'existing-remote',
@@ -269,13 +284,13 @@ describe('RCloneApiService', () => {
         it('should delete a remote', async () => {
             const input: DeleteRCloneRemoteDto = { name: 'remote-to-delete' };
             const mockResponse = { success: true };
-            mockGot.post.mockResolvedValue({ body: mockResponse });
+            mockGotPost.mockResolvedValue({ body: mockResponse });

             const result = await service.deleteRemote(input);

             expect(result).toEqual(mockResponse);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/config/delete',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/config\/delete$/),
                 expect.objectContaining({
                     json: { name: 'remote-to-delete' },
                     responseType: 'json',
@@ -296,13 +311,13 @@ describe('RCloneApiService', () => {
                 options: { delete_on: 'dst' },
             };
             const mockResponse = { jobid: 'job-123' };
-            mockGot.post.mockResolvedValue({ body: mockResponse });
+            mockGotPost.mockResolvedValue({ body: mockResponse });

             const result = await service.startBackup(input);

             expect(result).toEqual(mockResponse);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/sync/copy',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/sync\/copy$/),
                 expect.objectContaining({
                     json: {
                         srcFs: '/source/path',
@@ -323,13 +338,13 @@ describe('RCloneApiService', () => {
         it('should return job status', async () => {
             const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
             const mockStatus = { status: 'running', progress: 0.5 };
-            mockGot.post.mockResolvedValue({ body: mockStatus });
+            mockGotPost.mockResolvedValue({ body: mockStatus });

             const result = await service.getJobStatus(input);

             expect(result).toEqual(mockStatus);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/job/status',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/job\/status$/),
                 expect.objectContaining({
                     json: { jobid: 'job-123' },
                     responseType: 'json',
@@ -348,13 +363,13 @@ describe('RCloneApiService', () => {
                 { id: 'job-1', status: 'running' },
                 { id: 'job-2', status: 'finished' },
             ];
-            mockGot.post.mockResolvedValue({ body: mockJobs });
+            mockGotPost.mockResolvedValue({ body: mockJobs });

             const result = await service.listRunningJobs();

             expect(result).toEqual(mockJobs);
-            expect(mockGot.post).toHaveBeenCalledWith(
-                'http://unix:/tmp/rclone.sock:/job/list',
+            expect(mockGotPost).toHaveBeenCalledWith(
+                expect.stringMatching(/\/job\/list$/),
                 expect.objectContaining({
                     json: {},
                     responseType: 'json',
@@ -378,7 +393,7 @@ describe('RCloneApiService', () => {
                 },
             };
             Object.setPrototypeOf(httpError, HTTPError.prototype);
-            mockGot.post.mockRejectedValue(httpError);
+            mockGotPost.mockRejectedValue(httpError);

             await expect(service.getProviders()).rejects.toThrow(
                 'Rclone API Error (config/providers, HTTP 500): Rclone Error: Internal server error'
@@ -395,7 +410,7 @@ describe('RCloneApiService', () => {
                 },
             };
             Object.setPrototypeOf(httpError, HTTPError.prototype);
-            mockGot.post.mockRejectedValue(httpError);
+            mockGotPost.mockRejectedValue(httpError);

             await expect(service.getProviders()).rejects.toThrow(
                 'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
@@ -412,7 +427,7 @@ describe('RCloneApiService', () => {
                 },
             };
             Object.setPrototypeOf(httpError, HTTPError.prototype);
-            mockGot.post.mockRejectedValue(httpError);
+            mockGotPost.mockRejectedValue(httpError);

             await expect(service.getProviders()).rejects.toThrow(
                 'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
@@ -421,17 +436,108 @@ describe('RCloneApiService', () => {

         it('should handle non-HTTP errors', async () => {
             const networkError = new Error('Network connection failed');
-            mockGot.post.mockRejectedValue(networkError);
+            mockGotPost.mockRejectedValue(networkError);

             await expect(service.getProviders()).rejects.toThrow('Network connection failed');
         });

         it('should handle unknown errors', async () => {
-            mockGot.post.mockRejectedValue('unknown error');
+            mockGotPost.mockRejectedValue('unknown error');

             await expect(service.getProviders()).rejects.toThrow(
                 'Unknown error calling RClone API (config/providers) with params {}: unknown error'
             );
         });
     });

+    describe('checkRcloneBinaryExists', () => {
+        beforeEach(() => {
+            // Create a new service instance without initializing for these tests
+            service = new RCloneApiService();
+        });
+
+        it('should return true when rclone version is 1.70.0', async () => {
+            mockExeca.mockResolvedValueOnce({
+                stdout: 'rclone v1.70.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
+                stderr: '',
+            } as any);
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(true);
+        });
+
+        it('should return true when rclone version is newer than 1.70.0', async () => {
+            mockExeca.mockResolvedValueOnce({
+                stdout: 'rclone v1.75.2\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
+                stderr: '',
+            } as any);
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(true);
+        });
+
+        it('should return false when rclone version is older than 1.70.0', async () => {
+            mockExeca.mockResolvedValueOnce({
+                stdout: 'rclone v1.69.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
+                stderr: '',
+            } as any);
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(false);
+        });
+
+        it('should return false when rclone version is much older', async () => {
+            mockExeca.mockResolvedValueOnce({
+                stdout: 'rclone v1.50.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
+                stderr: '',
+            } as any);
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(false);
+        });
+
+        it('should return false when version cannot be parsed', async () => {
+            mockExeca.mockResolvedValueOnce({
+                stdout: 'rclone unknown version format',
+                stderr: '',
+            } as any);
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(false);
+        });
+
+        it('should return false when rclone binary is not found', async () => {
+            const error = new Error('Command not found') as any;
+            error.code = 'ENOENT';
+            mockExeca.mockRejectedValueOnce(error);
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(false);
+        });
+
+        it('should return false and log error for other exceptions', async () => {
+            mockExeca.mockRejectedValueOnce(new Error('Some other error'));
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(false);
+        });
+
+        it('should handle beta/rc versions correctly', async () => {
+            mockExeca.mockResolvedValueOnce({
+                stdout: 'rclone v1.70.0-beta.1\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
+                stderr: '',
+            } as any);
+
+            const result = await (service as any).checkRcloneBinaryExists();
+
+            expect(result).toBe(true);
+        });
+    });
 });
@@ -2,7 +2,7 @@ import { join } from 'path';

 import type { JSONWebKeySet } from 'jose';

-import { PORT } from '@app/environment.js';
+import { ENABLE_NEXT_DOCKER_RELEASE, PORT } from '@app/environment.js';

 export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
     const envPort = PORT;
@@ -79,3 +79,14 @@ export const KEYSERVER_VALIDATION_ENDPOINT = 'https://keys.lime-technology.com/v

 /** Set the max retries for the GraphQL Client */
 export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;
+
+/**
+ * Feature flags are used to conditionally enable or disable functionality in the Unraid API.
+ *
+ * Keys are human readable feature flag names -- will be used to construct error messages.
+ *
+ * Values are boolean/truthy values.
+ */
+export const FeatureFlags = Object.freeze({
+    ENABLE_NEXT_DOCKER_RELEASE,
+});
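Because `FeatureFlags` is a frozen object literal, consumers can derive the set of valid flag names from it instead of maintaining a separate union type. A sketch of the idea (the type name is assumed; it is not shown in this diff):

```typescript
// Hypothetical derived type: constrains arguments such as the flagKey of
// @UseFeatureFlag(...) to keys that actually exist on FeatureFlags.
export type FeatureFlagKey = keyof typeof FeatureFlags;
```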
@@ -1,7 +1,7 @@
 import pino from 'pino';
 import pretty from 'pino-pretty';

-import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';
+import { API_VERSION, LOG_LEVEL, LOG_TYPE, SUPPRESS_LOGS } from '@app/environment.js';

 export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;

@@ -17,30 +17,27 @@ const nullDestination = pino.destination({

 export const logDestination =
     process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
-const localFileDestination = pino.destination({
-    dest: PATHS_LOGS_FILE,
-    sync: true,
-});

+// Since PM2 captures stdout and writes to the log file, we should not colorize stdout
+// to avoid ANSI escape codes in the log file
 const stream = SUPPRESS_LOGS
     ? nullDestination
     : LOG_TYPE === 'pretty'
       ? pretty({
             singleLine: true,
             hideObject: false,
-            colorize: true,
-            colorizeObjects: true,
+            colorize: false, // No colors since PM2 writes stdout to file
+            colorizeObjects: false,
             levelFirst: false,
             ignore: 'hostname,pid',
             destination: logDestination,
             translateTime: 'HH:mm:ss',
             customPrettifiers: {
                 time: (timestamp: string | object) => `[${timestamp}`,
-                level: (logLevel: string | object, key: string, log: any, extras: any) => {
-                    // Use labelColorized which preserves the colors
-                    const { labelColorized } = extras;
+                level: (_logLevel: string | object, _key: string, log: any, extras: any) => {
+                    // Use label instead of labelColorized for non-colored output
+                    const { label } = extras;
                     const context = log.context || log.logger || 'app';
-                    return `${labelColorized} ${context}]`;
+                    return `${label} ${context}]`;
                 },
             },
             messageFormat: (log: any, messageKey: string) => {
@@ -98,7 +95,7 @@ export const keyServerLogger = logger.child({ logger: 'key-server' });
 export const remoteAccessLogger = logger.child({ logger: 'remote-access' });
 export const remoteQueryLogger = logger.child({ logger: 'remote-query' });
 export const apiLogger = logger.child({ logger: 'api' });
-export const pluginLogger = logger.child({ logger: 'plugin', stream: localFileDestination });
+export const pluginLogger = logger.child({ logger: 'plugin' });

 export const loggers = [
     internalLogger,
@@ -110,3 +110,6 @@ export const PATHS_CONFIG_MODULES =

 export const PATHS_LOCAL_SESSION_FILE =
     process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';
+
+/** feature flag for the upcoming docker release */
+export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';
@@ -14,6 +14,7 @@ import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
 import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
 import { LegacyConfigModule } from '@app/unraid-api/config/legacy-config.module.js';
 import { CronModule } from '@app/unraid-api/cron/cron.module.js';
+import { JobModule } from '@app/unraid-api/cron/job.module.js';
 import { GraphModule } from '@app/unraid-api/graph/graph.module.js';
 import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
 import { RestModule } from '@app/unraid-api/rest/rest.module.js';
@@ -24,7 +25,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
     GlobalDepsModule,
     LegacyConfigModule,
     PubSubModule,
-    ScheduleModule.forRoot(),
+    JobModule,
     LoggerModule.forRoot({
         pinoHttp: {
             logger: apiLogger,
@@ -15,7 +15,7 @@ export type Scalars = {
   Int: { input: number; output: number; }
   Float: { input: number; output: number; }
   /** The `BigInt` scalar type represents non-fractional signed whole numeric values. */
-  BigInt: { input: any; output: any; }
+  BigInt: { input: number; output: number; }
   /** A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format. */
   DateTime: { input: string; output: string; }
   /** The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
@@ -241,6 +241,8 @@ export type ArrayDisk = Node & {
   id: Scalars['PrefixedID']['output'];
   /** Array slot number. Parity1 is always 0 and Parity2 is always 29. Array slots will be 1 - 28. Cache slots are 30 - 53. Flash is 54. */
   idx: Scalars['Int']['output'];
+  /** Whether the disk is currently spinning */
+  isSpinning?: Maybe<Scalars['Boolean']['output']>;
   name?: Maybe<Scalars['String']['output']>;
   /** Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk. */
   numErrors?: Maybe<Scalars['BigInt']['output']>;
@@ -607,6 +609,8 @@ export type Disk = Node & {
   id: Scalars['PrefixedID']['output'];
   /** The interface type of the disk */
   interfaceType: DiskInterfaceType;
+  /** Whether the disk is spinning or not */
+  isSpinning: Scalars['Boolean']['output'];
   /** The model name of the disk */
   name: Scalars['String']['output'];
   /** The partitions on the disk */
@@ -674,6 +678,7 @@ export enum DiskSmartStatus {

 export type Docker = Node & {
   __typename?: 'Docker';
+  containerUpdateStatuses: Array<ExplicitStatusItem>;
   containers: Array<DockerContainer>;
   id: Scalars['PrefixedID']['output'];
   networks: Array<DockerNetwork>;
@@ -699,13 +704,15 @@ export type DockerContainer = Node & {
   id: Scalars['PrefixedID']['output'];
   image: Scalars['String']['output'];
   imageId: Scalars['String']['output'];
+  isRebuildReady?: Maybe<Scalars['Boolean']['output']>;
+  isUpdateAvailable?: Maybe<Scalars['Boolean']['output']>;
   labels?: Maybe<Scalars['JSON']['output']>;
   mounts?: Maybe<Array<Scalars['JSON']['output']>>;
   names: Array<Scalars['String']['output']>;
   networkSettings?: Maybe<Scalars['JSON']['output']>;
   ports: Array<ContainerPort>;
-  /** Total size of all the files in the container */
-  sizeRootFs?: Maybe<Scalars['Int']['output']>;
+  /** Total size of all files in the container (in bytes) */
+  sizeRootFs?: Maybe<Scalars['BigInt']['output']>;
   state: ContainerState;
   status: Scalars['String']['output'];
 };
@@ -770,6 +777,12 @@ export type EnableDynamicRemoteAccessInput = {
   url: AccessUrlInput;
 };

+export type ExplicitStatusItem = {
+  __typename?: 'ExplicitStatusItem';
+  name: Scalars['String']['output'];
+  updateStatus: UpdateStatus;
+};
+
 export type Flash = Node & {
   __typename?: 'Flash';
   guid: Scalars['String']['output'];
@@ -1225,6 +1238,7 @@ export type Mutation = {
   rclone: RCloneMutations;
   /** Reads each notification to recompute & update the overview. */
   recalculateOverview: NotificationOverview;
+  refreshDockerDigests: Scalars['Boolean']['output'];
   /** Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required. */
   removePlugin: Scalars['Boolean']['output'];
   setDockerFolderChildren: ResolvedOrganizerV1;
@@ -2260,6 +2274,14 @@ export type UpdateSettingsResponse = {
   warnings?: Maybe<Array<Scalars['String']['output']>>;
 };

+/** Update status of a container. */
+export enum UpdateStatus {
+  REBUILD_READY = 'REBUILD_READY',
+  UNKNOWN = 'UNKNOWN',
+  UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
+  UP_TO_DATE = 'UP_TO_DATE'
+}
+
 export type Uptime = {
   __typename?: 'Uptime';
   timestamp?: Maybe<Scalars['String']['output']>;
@@ -35,7 +35,8 @@ export class RestartCommand extends CommandRunner {
             { tag: 'PM2 Restart', raw: true, extendEnv: true, env },
             'restart',
             ECOSYSTEM_PATH,
-            '--update-env'
+            '--update-env',
+            '--mini-list'
         );

         if (stderr) {
@@ -33,7 +33,8 @@ export class StartCommand extends CommandRunner {
             { tag: 'PM2 Start', raw: true, extendEnv: true, env },
             'start',
             ECOSYSTEM_PATH,
-            '--update-env'
+            '--update-env',
+            '--mini-list'
         );
         if (stdout) {
             this.logger.log(stdout.toString());
@@ -8,6 +8,11 @@ export class StatusCommand extends CommandRunner {
         super();
     }
     async run(): Promise<void> {
-        await this.pm2.run({ tag: 'PM2 Status', stdio: 'inherit', raw: true }, 'status', 'unraid-api');
+        await this.pm2.run(
+            { tag: 'PM2 Status', stdio: 'inherit', raw: true },
+            'status',
+            'unraid-api',
+            '--mini-list'
+        );
     }
 }
@@ -33,7 +33,8 @@ export class StopCommand extends CommandRunner {
             { tag: 'PM2 Delete', stdio: 'inherit' },
             'delete',
             ECOSYSTEM_PATH,
-            '--no-autorestart'
+            '--no-autorestart',
+            '--mini-list'
         );
     }
 }
@@ -1,11 +1,11 @@
 import { Module } from '@nestjs/common';
-import { ScheduleModule } from '@nestjs/schedule';

+import { JobModule } from '@app/unraid-api/cron/job.module.js';
 import { LogRotateService } from '@app/unraid-api/cron/log-rotate.service.js';
 import { WriteFlashFileService } from '@app/unraid-api/cron/write-flash-file.service.js';

 @Module({
-    imports: [],
+    imports: [JobModule],
     providers: [WriteFlashFileService, LogRotateService],
 })
 export class CronModule {}
13
api/src/unraid-api/cron/job.module.ts
Normal file
@@ -0,0 +1,13 @@
import { Module } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';

/**
 * Sets up common dependencies for initializing jobs (e.g. scheduler registry, cron jobs).
 *
 * Simplifies testing setup & application dependency tree by ensuring `forRoot` is called only once.
 */
@Module({
    imports: [ScheduleModule.forRoot()],
    exports: [ScheduleModule],
})
export class JobModule {}
172
api/src/unraid-api/decorators/omit-if.decorator.spec.ts
Normal file
@@ -0,0 +1,172 @@
import { Reflector } from '@nestjs/core';
import { Field, Mutation, ObjectType, Query, ResolveField, Resolver } from '@nestjs/graphql';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { OMIT_IF_METADATA_KEY, OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';

describe('OmitIf Decorator', () => {
    let reflector: Reflector;

    beforeEach(() => {
        reflector = new Reflector();
    });

    describe('OmitIf', () => {
        it('should set metadata when condition is true', () => {
            class TestResolver {
                @OmitIf(true)
                testMethod() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBe(true);
        });

        it('should not set metadata when condition is false', () => {
            class TestResolver {
                @OmitIf(false)
                testMethod() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBeUndefined();
        });

        it('should evaluate function conditions', () => {
            const mockCondition = vi.fn(() => true);

            class TestResolver {
                @OmitIf(mockCondition)
                testMethod() {
                    return 'test';
                }
            }

            expect(mockCondition).toHaveBeenCalledOnce();
            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBe(true);
        });

        it('should evaluate function conditions that return false', () => {
            const mockCondition = vi.fn(() => false);

            class TestResolver {
                @OmitIf(mockCondition)
                testMethod() {
                    return 'test';
                }
            }

            expect(mockCondition).toHaveBeenCalledOnce();
            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBeUndefined();
        });

        it('should work with environment variables', () => {
            const originalEnv = process.env.NODE_ENV;
            process.env.NODE_ENV = 'production';

            class TestResolver {
                @OmitIf(process.env.NODE_ENV === 'production')
                testMethod() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBe(true);

            process.env.NODE_ENV = originalEnv;
        });
    });

    describe('Integration with NestJS GraphQL decorators', () => {
        it('should work with @Query decorator', () => {
            @Resolver()
            class TestResolver {
                @OmitIf(true)
                @Query(() => String)
                omittedQuery() {
                    return 'test';
                }

                @OmitIf(false)
                @Query(() => String)
                includedQuery() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedQuery);
            const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedQuery);

            expect(omittedMetadata).toBe(true);
            expect(includedMetadata).toBeUndefined();
        });

        it('should work with @Mutation decorator', () => {
            @Resolver()
            class TestResolver {
                @OmitIf(true)
                @Mutation(() => String)
                omittedMutation() {
                    return 'test';
                }

                @OmitIf(false)
                @Mutation(() => String)
                includedMutation() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedMutation);
            const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedMutation);

            expect(omittedMetadata).toBe(true);
            expect(includedMetadata).toBeUndefined();
        });

        it('should work with @ResolveField decorator', () => {
            @ObjectType()
            class TestType {
                @Field()
                id: string = '';
            }

            @Resolver(() => TestType)
            class TestResolver {
                @OmitIf(true)
                @ResolveField(() => String)
                omittedField() {
                    return 'test';
                }

                @OmitIf(false)
                @ResolveField(() => String)
                includedField() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedField);
            const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedField);

            expect(omittedMetadata).toBe(true);
            expect(includedMetadata).toBeUndefined();
        });
    });
});
80
api/src/unraid-api/decorators/omit-if.decorator.ts
Normal file
@@ -0,0 +1,80 @@
import { SetMetadata } from '@nestjs/common';
import { Extensions } from '@nestjs/graphql';

import { MapperKind, mapSchema } from '@graphql-tools/utils';
import { GraphQLFieldConfig, GraphQLSchema } from 'graphql';

export const OMIT_IF_METADATA_KEY = 'omitIf';

/**
 * Decorator that conditionally omits a GraphQL field/query/mutation based on a condition.
 * The field will only be omitted from the schema when the condition evaluates to true.
 *
 * @param condition - If the condition evaluates to true, the field will be omitted from the schema
 * @returns A decorator that wraps the target field/query/mutation
 *
 * @example
 * ```typescript
 * @OmitIf(process.env.NODE_ENV === 'production')
 * @Query(() => String)
 * async debugQuery() {
 *     return 'This query is omitted in production';
 * }
 * ```
 */
export function OmitIf(condition: boolean | (() => boolean)): MethodDecorator & PropertyDecorator {
    const shouldOmit = typeof condition === 'function' ? condition() : condition;

    return (target: object, propertyKey?: string | symbol, descriptor?: PropertyDescriptor) => {
        if (shouldOmit) {
            SetMetadata(OMIT_IF_METADATA_KEY, true)(
                target,
                propertyKey as string,
                descriptor as PropertyDescriptor
            );
            Extensions({ omitIf: true })(
                target,
                propertyKey as string,
                descriptor as PropertyDescriptor
            );
        }

        return descriptor;
    };
}

/**
 * Schema transformer that omits fields/queries/mutations based on the OmitIf decorator.
 * @param schema - The GraphQL schema to transform
 * @returns The transformed GraphQL schema
 */
export function omitIfSchemaTransformer(schema: GraphQLSchema): GraphQLSchema {
    return mapSchema(schema, {
        [MapperKind.OBJECT_FIELD]: (
            fieldConfig: GraphQLFieldConfig<any, any>,
            fieldName: string,
            typeName: string
        ) => {
            const extensions = fieldConfig.extensions || {};

            if (extensions.omitIf === true) {
                return null;
            }

            return fieldConfig;
        },
        [MapperKind.ROOT_FIELD]: (
            fieldConfig: GraphQLFieldConfig<any, any>,
            fieldName: string,
            typeName: string
        ) => {
            const extensions = fieldConfig.extensions || {};

            if (extensions.omitIf === true) {
                return null;
            }

            return fieldConfig;
        },
    });
}
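The diff does not show where `omitIfSchemaTransformer` gets registered. One plausible wiring, using the standard `transformSchema` option of `@nestjs/graphql` (a sketch; the repo's actual module options are not shown here):

```typescript
import { ApolloDriver, ApolloDriverConfig } from '@nestjs/apollo';
import { GraphQLModule } from '@nestjs/graphql';

import { omitIfSchemaTransformer } from '@app/unraid-api/decorators/omit-if.decorator.js';

// Sketch only: strips fields whose extensions were tagged by @OmitIf
// after NestJS builds the schema.
GraphQLModule.forRoot<ApolloDriverConfig>({
    driver: ApolloDriver,
    autoSchemaFile: true,
    transformSchema: (schema) => omitIfSchemaTransformer(schema),
});
```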
317
api/src/unraid-api/decorators/use-feature-flag.decorator.spec.ts
Normal file
@@ -0,0 +1,317 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
// fixme: types don't sync with mocks, and there's no override to simplify testing.

import { Reflector } from '@nestjs/core';
import { Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { OMIT_IF_METADATA_KEY } from '@app/unraid-api/decorators/omit-if.decorator.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';

// Mock the FeatureFlags
vi.mock('@app/consts.js', () => ({
    FeatureFlags: Object.freeze({
        ENABLE_NEXT_DOCKER_RELEASE: false,
        ENABLE_EXPERIMENTAL_FEATURE: true,
        ENABLE_DEBUG_MODE: false,
        ENABLE_BETA_FEATURES: true,
    }),
}));

describe('UseFeatureFlag Decorator', () => {
    let reflector: Reflector;

    beforeEach(() => {
        reflector = new Reflector();
    });

    afterEach(() => {
        vi.clearAllMocks();
    });

    describe('Basic functionality', () => {
        it('should omit field when feature flag is false', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                testQuery() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
            expect(metadata).toBe(true); // Should be omitted because flag is false
        });

        it('should include field when feature flag is true', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Query(() => String)
                testQuery() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
            expect(metadata).toBeUndefined(); // Should not be omitted because flag is true
        });
    });

    describe('With different decorator types', () => {
        it('should work with @Query decorator', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_DEBUG_MODE')
                @Query(() => String)
                debugQuery() {
                    return 'debug';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @Query(() => String)
                betaQuery() {
                    return 'beta';
                }
            }

            const instance = new TestResolver();
            const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery);
            const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery);

            expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
            expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
        });

        it('should work with @Mutation decorator', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Mutation(() => String)
                dockerMutation() {
                    return 'docker';
                }

                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Mutation(() => String)
                experimentalMutation() {
                    return 'experimental';
                }
            }

            const instance = new TestResolver();
            const dockerMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.dockerMutation);
            const experimentalMetadata = reflector.get(
                OMIT_IF_METADATA_KEY,
                instance.experimentalMutation
            );

            expect(dockerMetadata).toBe(true); // ENABLE_NEXT_DOCKER_RELEASE is false
            expect(experimentalMetadata).toBeUndefined(); // ENABLE_EXPERIMENTAL_FEATURE is true
        });

        it('should work with @ResolveField decorator', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_DEBUG_MODE')
                @ResolveField(() => String)
                debugField() {
                    return 'debug';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @ResolveField(() => String)
                betaField() {
                    return 'beta';
                }
            }

            const instance = new TestResolver();
            const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugField);
            const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaField);

            expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
            expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
        });
    });

    describe('Multiple decorators on same class', () => {
        it('should handle multiple feature flags independently', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                dockerQuery() {
                    return 'docker';
                }

                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Query(() => String)
                experimentalQuery() {
                    return 'experimental';
                }

                @UseFeatureFlag('ENABLE_DEBUG_MODE')
                @Query(() => String)
                debugQuery() {
                    return 'debug';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @Query(() => String)
                betaQuery() {
                    return 'beta';
                }
            }

            const instance = new TestResolver();

            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery)).toBeUndefined();
        });
    });

    describe('Type safety', () => {
        it('should only accept valid feature flag keys', () => {
            // This test verifies TypeScript compile-time type safety
            // The following would cause a TypeScript error if uncommented:
            // @UseFeatureFlag('INVALID_FLAG')

            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                validQuery() {
                    return 'valid';
                }
            }

            const instance = new TestResolver();
            expect(instance.validQuery).toBeDefined();
        });
    });

    describe('Integration scenarios', () => {
        it('should work correctly with other decorators', () => {
            const customDecorator = (
                target: any,
                propertyKey: string | symbol,
                descriptor: PropertyDescriptor
            ) => {
                Reflect.defineMetadata('custom', true, target, propertyKey);
                return descriptor;
            };

            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @customDecorator
                @Query(() => String)
                multiDecoratorQuery() {
                    return 'multi';
                }
            }

            const instance = new TestResolver();
            const omitMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.multiDecoratorQuery);
            const customMetadata = Reflect.getMetadata('custom', instance, 'multiDecoratorQuery');

            expect(omitMetadata).toBe(true);
            expect(customMetadata).toBe(true);
        });

        it('should maintain correct decorator order', () => {
            const orderTracker: string[] = [];

            const trackingDecorator = (name: string) => {
                return (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor) => {
                    orderTracker.push(name);
                    return descriptor;
                };
            };

            @Resolver()
            class TestResolver {
                @trackingDecorator('first')
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @trackingDecorator('last')
                @Query(() => String)
                orderedQuery() {
                    return 'ordered';
                }
            }

            // Decorators are applied bottom-up
            expect(orderTracker).toEqual(['last', 'first']);
        });
    });

    describe('Real-world usage patterns', () => {
        it('should work with Docker resolver pattern', () => {
            @Resolver()
            class DockerResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Mutation(() => String)
                async createDockerFolder(name: string) {
                    return `Created folder: ${name}`;
                }

                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Mutation(() => String)
                async deleteDockerEntries(entryIds: string[]) {
                    return `Deleted entries: ${entryIds.join(', ')}`;
                }

                @Query(() => String)
                async getDockerInfo() {
                    return 'Docker info';
                }
            }

            const instance = new DockerResolver();

            // Feature flag is false, so these should be omitted
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.createDockerFolder)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.deleteDockerEntries)).toBe(true);

            // No feature flag, so this should not be omitted
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.getDockerInfo)).toBeUndefined();
        });

        it('should handle mixed feature flags in same resolver', () => {
            @Resolver()
            class MixedResolver {
                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Query(() => String)
                experimentalQuery() {
                    return 'experimental';
                }

                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                dockerQuery() {
                    return 'docker';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @Mutation(() => String)
                betaMutation() {
                    return 'beta';
                }
            }

            const instance = new MixedResolver();

            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaMutation)).toBeUndefined();
        });
    });
});
22 api/src/unraid-api/decorators/use-feature-flag.decorator.ts (Normal file)
@@ -0,0 +1,22 @@
import { FeatureFlags } from '@app/consts.js';

import { OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';

/**
 * Decorator that conditionally includes a GraphQL field/query/mutation based on a feature flag.
 * The field will only be included in the schema when the feature flag is enabled.
 *
 * @param flagKey - The key of the feature flag in FeatureFlags
 * @returns A decorator that wraps OmitIf
 *
 * @example
 * ```typescript
 * @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
 * @Mutation(() => String)
 * async experimentalMutation() {
 *     return 'This mutation is only available when ENABLE_NEXT_DOCKER_RELEASE is true';
 * }
 * ```
 */
export function UseFeatureFlag(flagKey: keyof typeof FeatureFlags): MethodDecorator & PropertyDecorator {
    return OmitIf(!FeatureFlags[flagKey]);
}
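For orientation, the wrapped OmitIf decorator is defined elsewhere in the codebase; a minimal sketch of how such a conditional-metadata decorator could be built (the metadata key name and the no-op branch are assumptions; the repository's actual implementation may differ):

import { SetMetadata } from '@nestjs/common';

// Assumed key name; the real constant is exported from omit-if.decorator.ts.
export const OMIT_IF_METADATA_KEY = 'omitIf';

// Sketch: only record metadata when the omit condition holds, which is why the
// spec above expects reflector.get(...) to be undefined for enabled flags.
export function OmitIf(condition: boolean): MethodDecorator {
    if (!condition) {
        return () => undefined; // leave the handler untouched when the field should stay
    }
    return SetMetadata(OMIT_IF_METADATA_KEY, true);
}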
@@ -12,6 +12,7 @@ import { NoUnusedVariablesRule } from 'graphql';

 import { ENVIRONMENT } from '@app/environment.js';
 import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
+import { omitIfSchemaTransformer } from '@app/unraid-api/decorators/omit-if.decorator.js';

 // Import enum registrations to ensure they're registered with GraphQL
 import '@app/unraid-api/graph/auth/auth-action.enum.js';
@@ -64,7 +65,12 @@ import { PluginModule } from '@app/unraid-api/plugin/plugin.module.js';
             },
             // Only add transform when not in test environment to avoid GraphQL version conflicts
             transformSchema:
-                process.env.NODE_ENV === 'test' ? undefined : usePermissionsSchemaTransformer,
+                process.env.NODE_ENV === 'test'
+                    ? undefined
+                    : (schema) => {
+                          const schemaWithPermissions = usePermissionsSchemaTransformer(schema);
+                          return omitIfSchemaTransformer(schemaWithPermissions);
+                      },
             validationRules: [NoUnusedVariablesRule],
         };
     },
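The inline arrow above is plain function composition over the schema; an equivalent general-purpose helper (illustrative only, not part of this changeset) could be written as:

import type { GraphQLSchema } from 'graphql';

type SchemaTransformer = (schema: GraphQLSchema) => GraphQLSchema;

// Hypothetical helper: apply transformers left to right.
const composeTransformers =
    (...transformers: SchemaTransformer[]): SchemaTransformer =>
    (schema) =>
        transformers.reduce((acc, transform) => transform(acc), schema);

// Equivalent to the inline arrow above:
// transformSchema: composeTransformers(usePermissionsSchemaTransformer, omitIfSchemaTransformer)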
@@ -0,0 +1,47 @@
import { Injectable, Logger, OnApplicationBootstrap } from '@nestjs/common';
import { SchedulerRegistry, Timeout } from '@nestjs/schedule';

import { CronJob } from 'cron';

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';

@Injectable()
export class ContainerStatusJob implements OnApplicationBootstrap {
    private readonly logger = new Logger(ContainerStatusJob.name);
    constructor(
        private readonly dockerManifestService: DockerManifestService,
        private readonly schedulerRegistry: SchedulerRegistry,
        private readonly dockerConfigService: DockerConfigService
    ) {}

    /**
     * Initialize cron job for refreshing the update status for all containers on a user-configurable schedule.
     */
    onApplicationBootstrap() {
        if (!this.dockerConfigService.enabled()) return;
        const cronExpression = this.dockerConfigService.getConfig().updateCheckCronSchedule;
        const cronJob = CronJob.from({
            cronTime: cronExpression,
            onTick: () => {
                this.dockerManifestService.refreshDigests().catch((error) => {
                    this.logger.warn(error, 'Failed to refresh container update status');
                });
            },
            start: true,
        });
        this.schedulerRegistry.addCronJob(ContainerStatusJob.name, cronJob);
        this.logger.verbose(
            `Initialized cron job for refreshing container update status: ${ContainerStatusJob.name}`
        );
    }

    /**
     * Refresh container digests 5 seconds after application start.
     */
    @Timeout(5_000)
    async refreshContainerDigestsAfterStartup() {
        if (!this.dockerConfigService.enabled()) return;
        await this.dockerManifestService.refreshDigests();
    }
}
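The schedule string comes straight from user config, so it is validated elsewhere in this changeset with the same cron package used here; a quick illustration of that check:

import { validateCronExpression } from 'cron';

// '0 6 * * *' = minute 0, hour 6, every day (the shape of a daily 6 AM schedule)
console.log(validateCronExpression('0 6 * * *').valid); // true
console.log(validateCronExpression('not a cron').valid); // false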
@@ -0,0 +1,7 @@
import { Field, ObjectType } from '@nestjs/graphql';

@ObjectType()
export class DockerConfig {
    @Field(() => String)
    updateCheckCronSchedule!: string;
}
@@ -0,0 +1,195 @@
import { ConfigService } from '@nestjs/config';
import { CronExpression } from '@nestjs/schedule';
import { Test, TestingModule } from '@nestjs/testing';

import { ValidationError } from 'class-validator';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { AppError } from '@app/core/errors/app-error.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';

vi.mock('cron', () => ({
    validateCronExpression: vi.fn(),
}));

vi.mock('@app/unraid-api/graph/resolvers/validation.utils.js', () => ({
    validateObject: vi.fn(),
}));

describe('DockerConfigService - validate', () => {
    let service: DockerConfigService;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                DockerConfigService,
                {
                    provide: ConfigService,
                    useValue: {
                        get: vi.fn(),
                    },
                },
            ],
        }).compile();

        service = module.get<DockerConfigService>(DockerConfigService);
        vi.clearAllMocks();
    });

    describe('validate', () => {
        it('should validate and return docker config for valid cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
            const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
            expect(result).toBe(validatedConfig);
        });

        it('should validate and return docker config for predefined cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };
            const validatedConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith(CronExpression.EVERY_DAY_AT_6AM);
            expect(result).toBe(validatedConfig);
        });

        it('should throw AppError for invalid cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: 'invalid-cron' };
            const validatedConfig = { updateCheckCronSchedule: 'invalid-cron' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: false });

            await expect(service.validate(inputConfig)).rejects.toThrow(
                new AppError('Cron expression not supported: invalid-cron')
            );

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('invalid-cron');
        });

        it('should throw AppError for empty cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: '' };
            const validatedConfig = { updateCheckCronSchedule: '' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: false });

            await expect(service.validate(inputConfig)).rejects.toThrow(
                new AppError('Cron expression not supported: ')
            );

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('');
        });

        it('should throw AppError for malformed cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: '* * * *' };
            const validatedConfig = { updateCheckCronSchedule: '* * * *' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: false });

            await expect(service.validate(inputConfig)).rejects.toThrow(
                new AppError('Cron expression not supported: * * * *')
            );

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('* * * *');
        });

        it('should propagate validation errors from validateObject', async () => {
            const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
            const validationError = new ValidationError();
            validationError.property = 'updateCheckCronSchedule';

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );

            vi.mocked(validateObject).mockRejectedValue(validationError);

            await expect(service.validate(inputConfig)).rejects.toThrow();

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
        });

        it('should handle complex valid cron expressions', async () => {
            const inputConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };
            const validatedConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('0 0,12 * * 1-5');
            expect(result).toBe(validatedConfig);
        });

        it('should handle input with extra properties', async () => {
            const inputConfig = {
                updateCheckCronSchedule: '0 6 * * *',
                extraProperty: 'should be ignored',
            };
            const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
            expect(result).toBe(validatedConfig);
        });
    });
});
@@ -1,59 +1,45 @@
 import { Injectable } from '@nestjs/common';
 import { ConfigService } from '@nestjs/config';
+import { CronExpression } from '@nestjs/schedule';

 import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
+import { validateCronExpression } from 'cron';

 import { FeatureFlags } from '@app/consts.js';
 import { AppError } from '@app/core/errors/app-error.js';
+import { DockerConfig } from '@app/unraid-api/graph/resolvers/docker/docker-config.model.js';
 import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
-import {
-    DEFAULT_ORGANIZER_ROOT_ID,
-    DEFAULT_ORGANIZER_VIEW_ID,
-} from '@app/unraid-api/organizer/organizer.js';
-import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
-import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';

 @Injectable()
-export class DockerConfigService extends ConfigFilePersister<OrganizerV1> {
+export class DockerConfigService extends ConfigFilePersister<DockerConfig> {
     constructor(configService: ConfigService) {
         super(configService);
     }

     enabled(): boolean {
         return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
     }

     configKey(): string {
-        return 'dockerOrganizer';
+        return 'docker';
     }

     fileName(): string {
-        return 'docker.organizer.json';
+        return 'docker.config.json';
     }

-    defaultConfig(): OrganizerV1 {
+    defaultConfig(): DockerConfig {
         return {
-            version: 1,
-            resources: {},
-            views: {
-                default: {
-                    id: DEFAULT_ORGANIZER_VIEW_ID,
-                    name: 'Default',
-                    root: DEFAULT_ORGANIZER_ROOT_ID,
-                    entries: {
-                        root: {
-                            type: 'folder',
-                            id: DEFAULT_ORGANIZER_ROOT_ID,
-                            name: 'Root',
-                            children: [],
-                        },
-                    },
-                },
-            },
+            updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM,
         };
     }

-    async validate(config: object): Promise<OrganizerV1> {
-        const organizer = await validateObject(OrganizerV1, config);
-        const { isValid, errors } = await validateOrganizerIntegrity(organizer);
-        if (!isValid) {
-            throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
+    async validate(config: object): Promise<DockerConfig> {
+        const dockerConfig = await validateObject(DockerConfig, config);
+        const cronExpression = validateCronExpression(dockerConfig.updateCheckCronSchedule);
+        if (!cronExpression.valid) {
+            throw new AppError(`Cron expression not supported: ${dockerConfig.updateCheckCronSchedule}`);
         }
-        return organizer;
+        return dockerConfig;
     }
 }
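For reference, the defaults above persist a minimal JSON document; an illustrative sketch of the resulting docker.config.json (shape inferred from defaultConfig(); the concrete cron string is whatever CronExpression.EVERY_DAY_AT_6AM evaluates to):

// Assumed on-disk shape written by ConfigFilePersister:
interface PersistedDockerConfig {
    updateCheckCronSchedule: string; // a daily-at-6AM cron expression by default
}

const example: PersistedDockerConfig = {
    updateCheckCronSchedule: '0 6 * * *', // illustrative literal
};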
@@ -0,0 +1,51 @@
import { Logger } from '@nestjs/common';
import { Mutation, Parent, ResolveField, Resolver } from '@nestjs/graphql';

import { Resource } from '@unraid/shared/graphql.model.js';
import { AuthAction, UsePermissions } from '@unraid/shared/use-permissions.directive.js';

import { AppError } from '@app/core/errors/app-error.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';

@Resolver(() => DockerContainer)
export class DockerContainerResolver {
    private readonly logger = new Logger(DockerContainerResolver.name);
    constructor(private readonly dockerManifestService: DockerManifestService) {}

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => Boolean, { nullable: true })
    public async isUpdateAvailable(@Parent() container: DockerContainer) {
        try {
            return await this.dockerManifestService.isUpdateAvailableCached(container.image);
        } catch (error) {
            this.logger.error(error);
            throw new AppError('Failed to read cached update status. See graphql-api.log for details.');
        }
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => Boolean, { nullable: true })
    public async isRebuildReady(@Parent() container: DockerContainer) {
        return this.dockerManifestService.isRebuildReady(container.hostConfig?.networkMode);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => Boolean)
    public async refreshDockerDigests() {
        return this.dockerManifestService.refreshDigests();
    }
}
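Once ENABLE_NEXT_DOCKER_RELEASE is on, these resolve fields become part of the schema; an illustrative client document (the docker/containers wrapper shape is taken from the DockerResolver changes later in this diff):

// Hypothetical query exercising the new per-container fields:
const CONTAINER_UPDATE_QUERY = /* GraphQL */ `
    query ContainerUpdateStatus {
        docker {
            containers {
                names
                isUpdateAvailable
                isRebuildReady
            }
        }
    }
`;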
@@ -0,0 +1,62 @@
import { Injectable } from '@nestjs/common';

import { AsyncMutex } from '@unraid/shared/util/processing.js';

import { docker } from '@app/core/utils/index.js';
import {
    CachedStatusEntry,
    DockerPhpService,
} from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';

@Injectable()
export class DockerManifestService {
    constructor(private readonly dockerPhpService: DockerPhpService) {}

    private readonly refreshDigestsMutex = new AsyncMutex(() => {
        return this.dockerPhpService.refreshDigestsViaPhp();
    });

    /**
     * Recomputes local/remote docker container digests and writes them to /var/lib/docker/unraid-update-status.json
     * @param mutex - Optional mutex to use for the operation. If not provided, a default mutex will be used.
     * @param dockerUpdatePath - Optional path to the DockerUpdate.php file. If not provided, the default path will be used.
     * @returns True if the digests were refreshed, false if the operation failed
     */
    async refreshDigests(mutex = this.refreshDigestsMutex, dockerUpdatePath?: string) {
        return mutex.do(() => {
            return this.dockerPhpService.refreshDigestsViaPhp(dockerUpdatePath);
        });
    }

    /**
     * Checks if an update is available for a given container image.
     * @param imageRef - The image reference to check, e.g. "unraid/baseimage:latest". If no tag is provided, "latest" is assumed, following the webgui's implementation.
     * @param cacheData read from /var/lib/docker/unraid-update-status.json by default
     * @returns True if an update is available, false if not, or null if the status is unknown
     */
    async isUpdateAvailableCached(imageRef: string, cacheData?: Record<string, CachedStatusEntry>) {
        let taggedRef = imageRef;
        if (!taggedRef.includes(':')) taggedRef += ':latest';

        cacheData ??= await this.dockerPhpService.readCachedUpdateStatus();
        const containerData = cacheData[taggedRef];
        if (!containerData) return null;
        return containerData.status?.toLowerCase() === 'true';
    }

    /**
     * Checks if a container is rebuild ready.
     * @param networkMode - The network mode of the container, e.g. "container:unraid/baseimage:latest".
     * @returns True if the container is rebuild ready, false if not
     */
    async isRebuildReady(networkMode?: string) {
        if (!networkMode || !networkMode.startsWith('container:')) return false;
        const target = networkMode.slice('container:'.length);
        try {
            await docker.getContainer(target).inspect();
            return false;
        } catch {
            return true; // unresolved target -> ':???' equivalent
        }
    }
}
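To make the cached lookup concrete, a small illustration using the CachedStatusEntry shape defined in docker-php.service.ts below (the sample digests are made up):

const cache: Record<string, CachedStatusEntry> = {
    'nginx:latest': { local: 'sha256:aaa', remote: 'sha256:bbb', status: 'true' },
    'redis:7': { local: 'sha256:ccc', remote: 'sha256:ccc', status: 'false' },
};

// isUpdateAvailableCached('nginx', cache)   -> true  ('nginx' normalizes to 'nginx:latest')
// isUpdateAvailableCached('redis:7', cache) -> false
// isUpdateAvailableCached('ghost', cache)   -> null  (not in the cache: unknown)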
130 api/src/unraid-api/graph/resolvers/docker/docker-php.service.ts (Normal file)
@@ -0,0 +1,130 @@
import { Injectable, Logger } from '@nestjs/common';
import { readFile } from 'fs/promises';

import { z } from 'zod';

import { phpLoader } from '@app/core/utils/plugins/php-loader.js';
import {
    ExplicitStatusItem,
    UpdateStatus,
} from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';

type StatusItem = { name: string; updateStatus: 0 | 1 | 2 | 3 };

/**
 * These types reflect the structure of the /var/lib/docker/unraid-update-status.json file,
 * which is not controlled by the Unraid API.
 */
const CachedStatusEntrySchema = z.object({
    /** sha256 digest - "sha256:..." */
    local: z.string(),
    /** sha256 digest - "sha256:..." */
    remote: z.string(),
    /** whether update is available (true), not available (false), or unknown (null) */
    status: z.enum(['true', 'false']).nullable(),
});
const CachedStatusSchema = z.record(z.string(), CachedStatusEntrySchema);
export type CachedStatusEntry = z.infer<typeof CachedStatusEntrySchema>;

@Injectable()
export class DockerPhpService {
    private readonly logger = new Logger(DockerPhpService.name);
    constructor() {}

    /**
     * Reads JSON from a file containing cached update status.
     * If the file does not exist, an empty object is returned.
     * @param cacheFile
     * @returns
     */
    async readCachedUpdateStatus(
        cacheFile = '/var/lib/docker/unraid-update-status.json'
    ): Promise<Record<string, CachedStatusEntry>> {
        try {
            const cache = await readFile(cacheFile, 'utf8');
            const cacheData = JSON.parse(cache);
            const { success, data } = CachedStatusSchema.safeParse(cacheData);
            if (success) return data;
            this.logger.warn(cacheData, 'Invalid cached update status');
            return {};
        } catch (error) {
            this.logger.warn(error, 'Failed to read cached update status');
            return {};
        }
    }

    /**----------------------
     * Refresh Container Digests
     *------------------------**/

    /**
     * Recomputes local/remote digests by triggering `DockerTemplates->getAllInfo(true)` via DockerUpdate.php
     * @param dockerUpdatePath - Path to the DockerUpdate.php file
     * @returns True if the digests were refreshed, false if the file is not found or the operation failed
     */
    async refreshDigestsViaPhp(
        dockerUpdatePath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerUpdate.php'
    ) {
        try {
            await phpLoader({
                file: dockerUpdatePath,
                method: 'GET',
            });
            return true;
        } catch {
            // ignore; offline may keep remote as 'undef'
            return false;
        }
    }

    /**----------------------
     * Parse Container Statuses
     *------------------------**/

    private parseStatusesFromDockerPush(js: string): ExplicitStatusItem[] {
        const matches = parseDockerPushCalls(js);
        return matches.map(({ name, updateStatus }) => ({
            name,
            updateStatus: this.updateStatusToString(updateStatus as StatusItem['updateStatus']),
        }));
    }

    private updateStatusToString(updateStatus: 0): UpdateStatus.UP_TO_DATE;
    private updateStatusToString(updateStatus: 1): UpdateStatus.UPDATE_AVAILABLE;
    private updateStatusToString(updateStatus: 2): UpdateStatus.REBUILD_READY;
    private updateStatusToString(updateStatus: 3): UpdateStatus.UNKNOWN;
    // prettier-ignore
    private updateStatusToString(updateStatus: StatusItem['updateStatus']): ExplicitStatusItem['updateStatus'];
    private updateStatusToString(
        updateStatus: StatusItem['updateStatus']
    ): ExplicitStatusItem['updateStatus'] {
        switch (updateStatus) {
            case 0:
                return UpdateStatus.UP_TO_DATE;
            case 1:
                return UpdateStatus.UPDATE_AVAILABLE;
            case 2:
                return UpdateStatus.REBUILD_READY;
            default:
                return UpdateStatus.UNKNOWN;
        }
    }

    /**
     * Gets the update statuses for all containers by triggering `DockerTemplates->getAllInfo(true)` via DockerContainers.php
     * @param dockerContainersPath - Path to the DockerContainers.php file
     * @returns The update statuses for all containers
     */
    async getContainerUpdateStatuses(
        dockerContainersPath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerContainers.php'
    ): Promise<ExplicitStatusItem[]> {
        const stdout = await phpLoader({
            file: dockerContainersPath,
            method: 'GET',
        });
        const parts = stdout.split('\0'); // [html, "docker.push(...)", busyFlag]
        const js = parts[1] || '';
        return this.parseStatusesFromDockerPush(js);
    }
}
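For context, a hedged sketch of the NUL-delimited payload that getContainerUpdateStatuses() splits apart (the HTML and busy-flag parts come from the webgui and are abbreviated here):

// Assumed shape of DockerContainers.php stdout: html \0 js \0 busyFlag
const sampleStdout = ['<tr>...</tr>', "docker.push({name:'nginx',update:1});", '0'].join('\0');

const js = sampleStdout.split('\0')[1] || '';
// js now holds the docker.push(...) calls handed to parseDockerPushCalls()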
@@ -0,0 +1,25 @@
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';

/**
 * Note that these values propagate down to API consumers, so be aware of breaking changes.
 */
export enum UpdateStatus {
    UP_TO_DATE = 'UP_TO_DATE',
    UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
    REBUILD_READY = 'REBUILD_READY',
    UNKNOWN = 'UNKNOWN',
}

registerEnumType(UpdateStatus, {
    name: 'UpdateStatus',
    description: 'Update status of a container.',
});

@ObjectType()
export class ExplicitStatusItem {
    @Field(() => String)
    name!: string;

    @Field(() => UpdateStatus)
    updateStatus!: UpdateStatus;
}
@@ -1,7 +1,7 @@
 import { Field, ID, Int, ObjectType, registerEnumType } from '@nestjs/graphql';

 import { Node } from '@unraid/shared/graphql.model.js';
-import { GraphQLJSON, GraphQLPort } from 'graphql-scalars';
+import { GraphQLBigInt, GraphQLJSON, GraphQLPort } from 'graphql-scalars';

 export enum ContainerPortType {
     TCP = 'TCP',
@@ -89,7 +89,10 @@ export class DockerContainer extends Node {
     @Field(() => [ContainerPort])
     ports!: ContainerPort[];

-    @Field(() => Int, { nullable: true, description: 'Total size of all the files in the container' })
+    @Field(() => GraphQLBigInt, {
+        nullable: true,
+        description: 'Total size of all files in the container (in bytes)',
+    })
     sizeRootFs?: number;

     @Field(() => GraphQLJSON, { nullable: true })
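The scalar swap matters because GraphQL's Int is a signed 32-bit value, so any container filesystem over roughly 2 GiB would overflow the old field type; a quick check:

// GraphQL Int max: 2^31 - 1 bytes, i.e. just under 2 GiB
const GRAPHQL_INT_MAX = 2 ** 31 - 1; // 2147483647

const fourGiB = 4 * 1024 ** 3; // 4294967296 bytes
console.log(fourGiB > GRAPHQL_INT_MAX); // true -> needs GraphQLBigInt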
@@ -1,15 +1,16 @@
 import { ConfigService } from '@nestjs/config';
 import { Test, TestingModule } from '@nestjs/testing';

 import { describe, expect, it, vi } from 'vitest';

 import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
-import { DockerEventService } from '@app/unraid-api/graph/resolvers/docker/docker-event.service.js';
-import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
+import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
 import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
 import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
 import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
 import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
+import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
+import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';

 describe('DockerModule', () => {
     it('should compile the module', async () => {
@@ -18,6 +19,8 @@ describe('DockerModule', () => {
         })
             .overrideProvider(DockerService)
             .useValue({ getDockerClient: vi.fn() })
+            .overrideProvider(DockerOrganizerConfigService)
+            .useValue({ getConfig: vi.fn() })
             .overrideProvider(DockerConfigService)
             .useValue({ getConfig: vi.fn() })
             .compile();
@@ -61,6 +64,7 @@ describe('DockerModule', () => {
                 DockerResolver,
                 { provide: DockerService, useValue: {} },
                 { provide: DockerOrganizerService, useValue: {} },
+                { provide: DockerPhpService, useValue: { getContainerUpdateStatuses: vi.fn() } },
             ],
         }).compile();
@@ -1,22 +1,36 @@
 import { Module } from '@nestjs/common';

 import { JobModule } from '@app/unraid-api/cron/job.module.js';
+import { ContainerStatusJob } from '@app/unraid-api/graph/resolvers/docker/container-status.job.js';
 import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
-import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
+import { DockerContainerResolver } from '@app/unraid-api/graph/resolvers/docker/docker-container.resolver.js';
+import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
+import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
 import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
 import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
 import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
+import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
+import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';

 @Module({
     imports: [JobModule],
     providers: [
         // Services
         DockerService,
+        DockerConfigService,
+        DockerOrganizerConfigService,
         DockerOrganizerService,
+        DockerManifestService,
+        DockerPhpService,
-        DockerConfigService,
         // DockerEventService,

+        // Jobs
+        ContainerStatusJob,

         // Resolvers
         DockerResolver,
         DockerMutationsResolver,
+        DockerContainerResolver,
     ],
     exports: [DockerService],
 })
@@ -3,10 +3,18 @@ import { Test } from '@nestjs/testing';

 import { beforeEach, describe, expect, it, vi } from 'vitest';

-import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
+import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
 import { ContainerState, DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
 import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
 import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
+import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
+import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';

+vi.mock('@app/unraid-api/utils/graphql-field-helper.js', () => ({
+    GraphQLFieldHelper: {
+        isFieldRequested: vi.fn(),
+    },
+}));

 describe('DockerResolver', () => {
     let resolver: DockerResolver;
@@ -26,7 +34,13 @@ describe('DockerResolver', () => {
                 {
                     provide: DockerOrganizerService,
                     useValue: {
                         getResolvedOrganizer: vi.fn(),
                         resolveOrganizer: vi.fn(),
                     },
                 },
+                {
+                    provide: DockerPhpService,
+                    useValue: {
+                        getContainerUpdateStatuses: vi.fn(),
+                    },
+                },
             ],
@@ -34,6 +48,9 @@ describe('DockerResolver', () => {

         resolver = module.get<DockerResolver>(DockerResolver);
         dockerService = module.get<DockerService>(DockerService);

+        // Reset mocks before each test
+        vi.clearAllMocks();
     });

     it('should be defined', () => {
@@ -73,9 +90,75 @@ describe('DockerResolver', () => {
             },
         ];
         vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
+        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(false);

-        const result = await resolver.containers(false);
+        const mockInfo = {} as any;
+
+        const result = await resolver.containers(false, mockInfo);
         expect(result).toEqual(mockContainers);
-        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false });
+        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
+        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: false });
     });

+    it('should request size when sizeRootFs field is requested', async () => {
+        const mockContainers: DockerContainer[] = [
+            {
+                id: '1',
+                autoStart: false,
+                command: 'test',
+                names: ['test-container'],
+                created: 1234567890,
+                image: 'test-image',
+                imageId: 'test-image-id',
+                ports: [],
+                sizeRootFs: 1024000,
+                state: ContainerState.EXITED,
+                status: 'Exited',
+            },
+        ];
+        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
+        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(true);
+
+        const mockInfo = {} as any;
+
+        const result = await resolver.containers(false, mockInfo);
+        expect(result).toEqual(mockContainers);
+        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
+        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: true });
+    });
+
+    it('should request size when GraphQLFieldHelper indicates sizeRootFs is requested', async () => {
+        const mockContainers: DockerContainer[] = [];
+        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
+        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(true);
+
+        const mockInfo = {} as any;
+
+        await resolver.containers(false, mockInfo);
+        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
+        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: true });
+    });
+
+    it('should not request size when GraphQLFieldHelper indicates sizeRootFs is not requested', async () => {
+        const mockContainers: DockerContainer[] = [];
+        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
+        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(false);
+
+        const mockInfo = {} as any;
+
+        await resolver.containers(false, mockInfo);
+        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
+        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: false });
+    });
+
+    it('should handle skipCache parameter', async () => {
+        const mockContainers: DockerContainer[] = [];
+        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
+        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(false);
+
+        const mockInfo = {} as any;
+
+        await resolver.containers(true, mockInfo);
+        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: true, size: false });
+    });
 });
@@ -1,23 +1,29 @@
-import { Args, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
+import { Args, Info, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';

+import type { GraphQLResolveInfo } from 'graphql';
 import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
 import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';

-import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
+import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
+import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
+import { ExplicitStatusItem } from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
 import {
     Docker,
     DockerContainer,
     DockerNetwork,
 } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
 import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
+import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
 import { DEFAULT_ORGANIZER_ROOT_ID } from '@app/unraid-api/organizer/organizer.js';
-import { OrganizerV1, ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
+import { ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
+import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';

 @Resolver(() => Docker)
 export class DockerResolver {
     constructor(
         private readonly dockerService: DockerService,
-        private readonly dockerOrganizerService: DockerOrganizerService
+        private readonly dockerOrganizerService: DockerOrganizerService,
+        private readonly dockerPhpService: DockerPhpService
     ) {}

     @UsePermissions({
@@ -37,9 +43,11 @@
     })
     @ResolveField(() => [DockerContainer])
     public async containers(
-        @Args('skipCache', { defaultValue: false, type: () => Boolean }) skipCache: boolean
+        @Args('skipCache', { defaultValue: false, type: () => Boolean }) skipCache: boolean,
+        @Info() info: GraphQLResolveInfo
     ) {
-        return this.dockerService.getContainers({ skipCache });
+        const requestsSize = GraphQLFieldHelper.isFieldRequested(info, 'sizeRootFs');
+        return this.dockerService.getContainers({ skipCache, size: requestsSize });
     }

     @UsePermissions({
@@ -53,6 +61,7 @@ export class DockerResolver {
         return this.dockerService.getNetworks({ skipCache });
     }

+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
     @UsePermissions({
         action: AuthAction.READ_ANY,
         resource: Resource.DOCKER,
@@ -62,6 +71,7 @@ export class DockerResolver {
         return this.dockerOrganizerService.resolveOrganizer();
     }

+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
     @UsePermissions({
         action: AuthAction.UPDATE_ANY,
         resource: Resource.DOCKER,
@@ -80,6 +90,7 @@ export class DockerResolver {
         return this.dockerOrganizerService.resolveOrganizer(organizer);
     }

+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
     @UsePermissions({
         action: AuthAction.UPDATE_ANY,
         resource: Resource.DOCKER,
@@ -96,6 +107,7 @@ export class DockerResolver {
         return this.dockerOrganizerService.resolveOrganizer(organizer);
     }

+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
     @UsePermissions({
         action: AuthAction.UPDATE_ANY,
         resource: Resource.DOCKER,
@@ -108,6 +120,7 @@ export class DockerResolver {
         return this.dockerOrganizerService.resolveOrganizer(organizer);
     }

+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
     @UsePermissions({
         action: AuthAction.UPDATE_ANY,
         resource: Resource.DOCKER,
@@ -123,4 +136,14 @@ export class DockerResolver {
         });
         return this.dockerOrganizerService.resolveOrganizer(organizer);
     }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => [ExplicitStatusItem])
+    public async containerUpdateStatuses() {
+        return this.dockerPhpService.getContainerUpdateStatuses();
+    }
 }
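Because the size flag is now derived from the field selection, clients only trigger Docker's expensive size computation when they actually select sizeRootFs; an illustrative document that would flip size to true:

// Hypothetical query; selecting sizeRootFs makes GraphQLFieldHelper report
// the field as requested, so the resolver calls getContainers({ size: true }).
const CONTAINERS_WITH_SIZE = /* GraphQL */ `
    query Containers {
        docker {
            containers {
                names
                sizeRootFs
            }
        }
    }
`;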
@@ -109,6 +109,65 @@ describe('DockerService', () => {
         expect(service).toBeDefined();
     });

+    it('should use separate cache keys for containers with and without size', async () => {
+        const mockContainersWithoutSize = [
+            {
+                Id: 'abc123',
+                Names: ['/test-container'],
+                Image: 'test-image',
+                ImageID: 'test-image-id',
+                Command: 'test',
+                Created: 1234567890,
+                State: 'exited',
+                Status: 'Exited',
+                Ports: [],
+                Labels: {},
+                HostConfig: { NetworkMode: 'bridge' },
+                NetworkSettings: {},
+                Mounts: [],
+            },
+        ];
+
+        const mockContainersWithSize = [
+            {
+                Id: 'abc123',
+                Names: ['/test-container'],
+                Image: 'test-image',
+                ImageID: 'test-image-id',
+                Command: 'test',
+                Created: 1234567890,
+                State: 'exited',
+                Status: 'Exited',
+                Ports: [],
+                Labels: {},
+                HostConfig: { NetworkMode: 'bridge' },
+                NetworkSettings: {},
+                Mounts: [],
+                SizeRootFs: 1024000,
+            },
+        ];
+
+        // First call without size
+        mockListContainers.mockResolvedValue(mockContainersWithoutSize);
+        mockCacheManager.get.mockResolvedValue(undefined);
+
+        await service.getContainers({ size: false });
+
+        expect(mockCacheManager.set).toHaveBeenCalledWith('docker_containers', expect.any(Array), 60000);
+
+        // Second call with size
+        mockListContainers.mockResolvedValue(mockContainersWithSize);
+        mockCacheManager.get.mockResolvedValue(undefined);
+
+        await service.getContainers({ size: true });
+
+        expect(mockCacheManager.set).toHaveBeenCalledWith(
+            'docker_containers_with_size',
+            expect.any(Array),
+            60000
+        );
+    });
+
     it('should get containers', async () => {
         const mockContainers = [
             {
@@ -159,7 +218,7 @@ describe('DockerService', () => {

         expect(mockListContainers).toHaveBeenCalledWith({
             all: true,
-            size: true,
+            size: false,
         });
         expect(mockCacheManager.set).toHaveBeenCalled(); // Ensure cache is set
     });
@@ -31,6 +31,7 @@ export class DockerService {
     private readonly logger = new Logger(DockerService.name);

     public static readonly CONTAINER_CACHE_KEY = 'docker_containers';
+    public static readonly CONTAINER_WITH_SIZE_CACHE_KEY = 'docker_containers_with_size';
     public static readonly NETWORK_CACHE_KEY = 'docker_networks';
     public static readonly CACHE_TTL_SECONDS = 60; // Cache for 60 seconds
@@ -71,6 +72,8 @@ export class DockerService {
     }

     public transformContainer(container: Docker.ContainerInfo): DockerContainer {
+        const sizeValue = (container as Docker.ContainerInfo & { SizeRootFs?: number }).SizeRootFs;
+
         const transformed: DockerContainer = {
             id: container.Id,
             names: container.Names,
@@ -86,7 +89,7 @@ export class DockerService {
                     ContainerPortType[port.Type.toUpperCase() as keyof typeof ContainerPortType] ||
                     ContainerPortType.TCP,
             })),
-            sizeRootFs: undefined,
+            sizeRootFs: sizeValue,
             labels: container.Labels ?? {},
             state:
                 typeof container.State === 'string'
@@ -109,21 +112,23 @@ export class DockerService {
         {
             skipCache = false,
             all = true,
-            size = true,
+            size = false,
             ...listOptions
         }: Partial<ContainerListingOptions> = { skipCache: false }
     ): Promise<DockerContainer[]> {
+        const cacheKey = size
+            ? DockerService.CONTAINER_WITH_SIZE_CACHE_KEY
+            : DockerService.CONTAINER_CACHE_KEY;
+
         if (!skipCache) {
-            const cachedContainers = await this.cacheManager.get<DockerContainer[]>(
-                DockerService.CONTAINER_CACHE_KEY
-            );
+            const cachedContainers = await this.cacheManager.get<DockerContainer[]>(cacheKey);
             if (cachedContainers) {
-                this.logger.debug('Using docker container cache');
+                this.logger.debug(`Using docker container cache (${size ? 'with' : 'without'} size)`);
                 return cachedContainers;
             }
         }

-        this.logger.debug('Updating docker container cache');
+        this.logger.debug(`Updating docker container cache (${size ? 'with' : 'without'} size)`);
         const rawContainers =
             (await this.client
                 .listContainers({
@@ -136,11 +141,7 @@ export class DockerService {
         this.autoStarts = await this.getAutoStarts();
         const containers = rawContainers.map((container) => this.transformContainer(container));

-        await this.cacheManager.set(
-            DockerService.CONTAINER_CACHE_KEY,
-            containers,
-            DockerService.CACHE_TTL_SECONDS * 1000
-        );
+        await this.cacheManager.set(cacheKey, containers, DockerService.CACHE_TTL_SECONDS * 1000);
         return containers;
     }
@@ -191,15 +192,18 @@ export class DockerService {
     }

     public async clearContainerCache(): Promise<void> {
-        await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
-        this.logger.debug('Invalidated container cache due to external event.');
+        await Promise.all([
+            this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY),
+            this.cacheManager.del(DockerService.CONTAINER_WITH_SIZE_CACHE_KEY),
+        ]);
+        this.logger.debug('Invalidated container caches due to external event.');
     }

     public async start(id: string): Promise<DockerContainer> {
         const container = this.client.getContainer(id);
         await container.start();
-        await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
-        this.logger.debug(`Invalidated container cache after starting ${id}`);
+        await this.clearContainerCache();
+        this.logger.debug(`Invalidated container caches after starting ${id}`);
         const containers = await this.getContainers({ skipCache: true });
         const updatedContainer = containers.find((c) => c.id === id);
         if (!updatedContainer) {
@@ -213,8 +217,8 @@ export class DockerService {
     public async stop(id: string): Promise<DockerContainer> {
         const container = this.client.getContainer(id);
         await container.stop({ t: 10 });
-        await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
-        this.logger.debug(`Invalidated container cache after stopping ${id}`);
+        await this.clearContainerCache();
+        this.logger.debug(`Invalidated container caches after stopping ${id}`);

         let containers = await this.getContainers({ skipCache: true });
         let updatedContainer: DockerContainer | undefined;
@@ -0,0 +1,64 @@
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';

import { FeatureFlags } from '@app/consts.js';
import { AppError } from '@app/core/errors/app-error.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
import {
    DEFAULT_ORGANIZER_ROOT_ID,
    DEFAULT_ORGANIZER_VIEW_ID,
} from '@app/unraid-api/organizer/organizer.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';

@Injectable()
export class DockerOrganizerConfigService extends ConfigFilePersister<OrganizerV1> {
    constructor(configService: ConfigService) {
        super(configService);
    }

    enabled(): boolean {
        return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
    }

    configKey(): string {
        return 'dockerOrganizer';
    }

    fileName(): string {
        return 'docker.organizer.json';
    }

    defaultConfig(): OrganizerV1 {
        return {
            version: 1,
            resources: {},
            views: {
                default: {
                    id: DEFAULT_ORGANIZER_VIEW_ID,
                    name: 'Default',
                    root: DEFAULT_ORGANIZER_ROOT_ID,
                    entries: {
                        root: {
                            type: 'folder',
                            id: DEFAULT_ORGANIZER_ROOT_ID,
                            name: 'Root',
                            children: [],
                        },
                    },
                },
            },
        };
    }

    async validate(config: object): Promise<OrganizerV1> {
        const organizer = await validateObject(OrganizerV1, config);
        const { isValid, errors } = await validateOrganizerIntegrity(organizer);
        if (!isValid) {
            throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
        }
        return organizer;
    }
}
@@ -2,17 +2,17 @@ import { Test } from '@nestjs/testing';

 import { beforeEach, describe, expect, it, vi } from 'vitest';

-import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
-import {
-    containerToResource,
-    DockerOrganizerService,
-} from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
 import {
     ContainerPortType,
     ContainerState,
     DockerContainer,
 } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
 import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
+import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
+import {
+    containerToResource,
+    DockerOrganizerService,
+} from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
 import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';

 describe('containerToResource', () => {
@@ -138,7 +138,7 @@ describe('containerToResource', () => {

 describe('DockerOrganizerService', () => {
     let service: DockerOrganizerService;
-    let configService: DockerConfigService;
+    let configService: DockerOrganizerConfigService;
     let dockerService: DockerService;

     const mockOrganizer: OrganizerV1 = {
@@ -178,7 +178,7 @@ describe('DockerOrganizerService', () => {
             providers: [
                 DockerOrganizerService,
                 {
-                    provide: DockerConfigService,
+                    provide: DockerOrganizerConfigService,
                     useValue: {
                         getConfig: vi.fn().mockImplementation(() => structuredClone(mockOrganizer)),
                         validate: vi.fn().mockImplementation((config) => Promise.resolve(config)),
@@ -220,7 +220,7 @@ describe('DockerOrganizerService', () => {
         }).compile();

         service = moduleRef.get<DockerOrganizerService>(DockerOrganizerService);
-        configService = moduleRef.get<DockerConfigService>(DockerConfigService);
+        configService = moduleRef.get<DockerOrganizerConfigService>(DockerOrganizerConfigService);
         dockerService = moduleRef.get<DockerService>(DockerService);
     });
@@ -3,9 +3,9 @@ import { Injectable, Logger } from '@nestjs/common';
 import type { ContainerListOptions } from 'dockerode';

 import { AppError } from '@app/core/errors/app-error.js';
-import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
 import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
 import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
+import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
 import {
     addMissingResourcesToView,
     createFolderInView,
@@ -47,7 +47,7 @@ export function containerListToResourcesObject(containers: DockerContainer[]): O
 export class DockerOrganizerService {
     private readonly logger = new Logger(DockerOrganizerService.name);
     constructor(
-        private readonly dockerConfigService: DockerConfigService,
+        private readonly dockerConfigService: DockerOrganizerConfigService,
         private readonly dockerService: DockerService
     ) {}
@@ -0,0 +1,124 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import type { DockerPushMatch } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
|
||||
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
|
||||
|
||||
describe('parseDockerPushCalls', () => {
|
||||
it('should extract name and update status from valid docker.push call', () => {
|
||||
const jsCode = "docker.push({name:'nginx',update:1});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
|
||||
});
|
||||
|
||||
it('should handle multiple docker.push calls in same string', () => {
|
||||
const jsCode = `
|
||||
docker.push({name:'nginx',update:1});
|
||||
docker.push({name:'mysql',update:0});
|
||||
docker.push({name:'redis',update:2});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'nginx', updateStatus: 1 },
|
||||
{ name: 'mysql', updateStatus: 0 },
|
||||
{ name: 'redis', updateStatus: 2 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle docker.push calls with additional properties', () => {
|
||||
const jsCode =
|
||||
"docker.push({id:'123',name:'nginx',version:'latest',update:3,status:'running'});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'nginx', updateStatus: 3 }]);
|
||||
});
|
||||
|
||||
it('should handle different property order', () => {
|
||||
const jsCode = "docker.push({update:2,name:'postgres',id:'456'});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'postgres', updateStatus: 2 }]);
|
||||
});
|
||||
|
||||
it('should handle container names with special characters', () => {
|
||||
const jsCode = "docker.push({name:'my-app_v2.0',update:1});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'my-app_v2.0', updateStatus: 1 }]);
|
||||
});
|
||||
|
||||
it('should handle whitespace variations', () => {
|
||||
const jsCode = "docker.push({ name: 'nginx' , update: 1 });";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
|
||||
});
|
||||
|
||||
it('should return empty array for empty string', () => {
|
||||
const result = parseDockerPushCalls('');
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when no docker.push calls found', () => {
|
||||
const jsCode = "console.log('no docker calls here');";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should ignore malformed docker.push calls', () => {
|
||||
const jsCode = `
|
||||
docker.push({name:'valid',update:1});
|
||||
docker.push({name:'missing-update'});
|
||||
docker.push({update:2});
|
||||
docker.push({name:'another-valid',update:0});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'valid', updateStatus: 1 },
|
||||
{ name: 'another-valid', updateStatus: 0 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle all valid update status values', () => {
|
||||
const jsCode = `
|
||||
docker.push({name:'container0',update:0});
|
||||
docker.push({name:'container1',update:1});
|
||||
docker.push({name:'container2',update:2});
|
||||
docker.push({name:'container3',update:3});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'container0', updateStatus: 0 },
|
||||
{ name: 'container1', updateStatus: 1 },
|
||||
{ name: 'container2', updateStatus: 2 },
|
||||
{ name: 'container3', updateStatus: 3 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle real-world example with HTML and multiple containers', () => {
|
||||
const jsCode = `
|
||||
<div>some html</div>
|
||||
docker.push({id:'abc123',name:'plex',version:'1.32',update:1,autostart:true});
|
||||
docker.push({id:'def456',name:'nextcloud',version:'latest',update:0,ports:'80:8080'});
|
||||
<script>more content</script>
|
||||
docker.push({id:'ghi789',name:'homeassistant',update:2});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'plex', updateStatus: 1 },
|
||||
{ name: 'nextcloud', updateStatus: 0 },
|
||||
{ name: 'homeassistant', updateStatus: 2 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle nested braces in other properties', () => {
|
||||
const jsCode = 'docker.push({config:\'{"nested":"value"}\',name:\'test\',update:1});';
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'test', updateStatus: 1 }]);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,24 @@
|
||||
export interface DockerPushMatch {
|
||||
name: string;
|
||||
updateStatus: number;
|
||||
}
|
||||
|
||||
export function parseDockerPushCalls(jsCode: string): DockerPushMatch[] {
|
||||
const dockerPushRegex = /docker\.push\(\{[^}]*(?:(?:[^{}]|{[^}]*})*)\}\);/g;
|
||||
const matches: DockerPushMatch[] = [];
|
||||
|
||||
for (const match of jsCode.matchAll(dockerPushRegex)) {
|
||||
const objectContent = match[0];
|
||||
|
||||
const nameMatch = objectContent.match(/name\s*:\s*'([^']+)'/);
|
||||
const updateMatch = objectContent.match(/update\s*:\s*(\d)/);
|
||||
|
||||
if (nameMatch && updateMatch) {
|
||||
const name = nameMatch[1];
|
||||
const updateStatus = Number(updateMatch[1]);
|
||||
matches.push({ name, updateStatus });
|
||||
}
|
||||
}
|
||||
|
||||
return matches;
|
||||
}
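
For orientation between diffs: a minimal usage sketch of the parser above, mirroring its test cases (the input string is illustrative, not taken from the repo). The "real-world" test suggests the webGui markup interleaves HTML with docker.push(...) calls, which is the case this utility targets.

import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';

// Hypothetical page markup mixing HTML and docker.push(...) calls.
const markup = "<div></div> docker.push({name:'nginx',update:1}); docker.push({name:'redis',update:0});";
const updates = parseDockerPushCalls(markup);
// updates => [{ name: 'nginx', updateStatus: 1 }, { name: 'redis', updateStatus: 0 }]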
@@ -1,4 +1,4 @@
import { Injectable, Logger, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
import { Injectable, Logger, OnApplicationBootstrap, OnModuleDestroy } from '@nestjs/common';
import crypto from 'crypto';
import { ChildProcess } from 'node:child_process';
import { mkdir, rm, writeFile } from 'node:fs/promises';
@@ -7,6 +7,7 @@ import { dirname, join } from 'node:path';
import { execa } from 'execa';
import got, { HTTPError } from 'got';
import pRetry from 'p-retry';
import semver from 'semver';

import { sanitizeParams } from '@app/core/log.js';
import { fileExists } from '@app/core/utils/files/file-exists.js';
@@ -25,7 +26,7 @@ import {
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';

@Injectable()
export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
export class RCloneApiService implements OnApplicationBootstrap, OnModuleDestroy {
    private isInitialized: boolean = false;
    private readonly logger = new Logger(RCloneApiService.name);
    private rcloneSocketPath: string = '';
@@ -44,7 +45,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
        return this.isInitialized;
    }

    async onModuleInit(): Promise<void> {
    async onApplicationBootstrap(): Promise<void> {
        // RClone startup disabled - early return
        if (ENVIRONMENT === 'production') {
            this.logger.debug('RClone startup is disabled');
@@ -239,12 +240,41 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
    }

    /**
     * Checks if the RClone binary is available on the system
     * Checks if the RClone binary is available on the system and meets minimum version requirements
     */
    private async checkRcloneBinaryExists(): Promise<boolean> {
        try {
            await execa('rclone', ['version']);
            this.logger.debug('RClone binary is available on the system.');
            const result = await execa('rclone', ['version']);
            const versionOutput = result.stdout.trim();

            // Extract raw version string (format: "rclone vX.XX.X" or "rclone vX.XX.X-beta.X")
            const versionMatch = versionOutput.match(/rclone v([\d.\-\w]+)/);
            if (!versionMatch) {
                this.logger.error('Unable to parse RClone version from output');
                return false;
            }

            const rawVersion = versionMatch[1];

            // Use semver.coerce to get base semver from prerelease versions
            const coercedVersion = semver.coerce(rawVersion);
            if (!coercedVersion) {
                this.logger.error(`Failed to parse RClone version: raw="${rawVersion}"`);
                return false;
            }

            const minimumVersion = '1.70.0';

            if (!semver.gte(coercedVersion, minimumVersion)) {
                this.logger.error(
                    `RClone version ${rawVersion} (coerced: ${coercedVersion}) is too old. Minimum required version is ${minimumVersion}`
                );
                return false;
            }

            this.logger.debug(
                `RClone binary is available on the system (version ${rawVersion}, coerced: ${coercedVersion}).`
            );
            return true;
        } catch (error: unknown) {
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
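
A quick aside on the version gate above: semver.coerce normalizes prerelease builds down to their base version before the gte comparison, so a beta of a sufficiently new rclone still passes. A minimal sketch of that behavior in isolation (version strings here are illustrative):

import semver from 'semver';

const coerced = semver.coerce('1.71.0-beta.2'); // SemVer instance for 1.71.0
if (coerced && semver.gte(coerced, '1.70.0')) {
    // binary is considered new enough
}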

350  api/src/unraid-api/rest/rest.service.test.ts  Normal file
@@ -0,0 +1,350 @@
import { Test, TestingModule } from '@nestjs/testing';
import type { ReadStream, Stats } from 'node:fs';
import { createReadStream } from 'node:fs';
import { stat, writeFile } from 'node:fs/promises';
import { Readable } from 'node:stream';

import { execa, ExecaError } from 'execa';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import type { ApiReportData } from '@app/unraid-api/cli/api-report.service.js';
import {
    getBannerPathIfPresent,
    getCasePathIfPresent,
} from '@app/core/utils/images/image-file-helpers.js';
import { getters } from '@app/store/index.js';
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
import { RestService } from '@app/unraid-api/rest/rest.service.js';

vi.mock('node:fs');
vi.mock('node:fs/promises');
vi.mock('execa');
vi.mock('@app/store/index.js');
vi.mock('@app/core/utils/images/image-file-helpers.js', () => ({
    getBannerPathIfPresent: vi.fn(),
    getCasePathIfPresent: vi.fn(),
}));

describe('RestService', () => {
    let service: RestService;
    let apiReportService: ApiReportService;

    beforeEach(async () => {
        vi.clearAllMocks();

        const module: TestingModule = await Test.createTestingModule({
            providers: [
                RestService,
                {
                    provide: ApiReportService,
                    useValue: {
                        generateReport: vi.fn(),
                    },
                },
            ],
        }).compile();

        service = module.get<RestService>(RestService);
        apiReportService = module.get<ApiReportService>(ApiReportService);
    });

    describe('getLogs', () => {
        const mockLogPath = '/usr/local/emhttp/logs/unraid-api';
        const mockGraphqlApiLog = '/var/log/graphql-api.log';
        const mockZipPath = '/usr/local/emhttp/logs/unraid-api.tar.gz';

        beforeEach(() => {
            vi.mocked(getters).paths = vi.fn().mockReturnValue({
                'log-base': mockLogPath,
            });
            // Mock saveApiReport to avoid side effects
            vi.spyOn(service as any, 'saveApiReport').mockResolvedValue(undefined);
        });

        it('should create and return log archive successfully', async () => {
            const mockStream: ReadStream = Readable.from([]) as ReadStream;
            vi.mocked(stat).mockImplementation((path) => {
                if (path === mockLogPath || path === mockZipPath) {
                    return Promise.resolve({ isFile: () => true } as unknown as Stats);
                }
                return Promise.reject(new Error('File not found'));
            });
            vi.mocked(execa).mockResolvedValue({
                stdout: '',
                stderr: '',
                exitCode: 0,
            } as any);
            vi.mocked(createReadStream).mockReturnValue(mockStream);

            const result = await service.getLogs();

            expect(execa).toHaveBeenCalledWith('tar', ['-czf', mockZipPath, mockLogPath], {
                timeout: 60000,
                reject: true,
            });
            expect(createReadStream).toHaveBeenCalledWith(mockZipPath);
            expect(result).toBe(mockStream);
        });

        it('should include graphql-api.log when it exists', async () => {
            vi.mocked(stat).mockImplementation((path) => {
                if (path === mockLogPath || path === mockGraphqlApiLog || path === mockZipPath) {
                    return Promise.resolve({ isFile: () => true } as unknown as Stats);
                }
                return Promise.reject(new Error('File not found'));
            });
            vi.mocked(execa).mockResolvedValue({
                stdout: '',
                stderr: '',
                exitCode: 0,
            } as any);
            vi.mocked(createReadStream).mockReturnValue(Readable.from([]) as ReadStream);

            await service.getLogs();

            expect(execa).toHaveBeenCalledWith(
                'tar',
                ['-czf', mockZipPath, mockLogPath, mockGraphqlApiLog],
                {
                    timeout: 60000,
                    reject: true,
                }
            );
        });

        it('should handle timeout errors with detailed message', async () => {
            vi.mocked(stat).mockImplementation((path) => {
                if (path === mockLogPath) {
                    return Promise.resolve({ isFile: () => true } as unknown as Stats);
                }
                return Promise.reject(new Error('File not found'));
            });

            const timeoutError = new Error('Command timed out') as ExecaError;
            timeoutError.timedOut = true;
            timeoutError.command =
                'tar -czf /usr/local/emhttp/logs/unraid-api.tar.gz /usr/local/emhttp/logs/unraid-api';
            timeoutError.exitCode = undefined;
            timeoutError.stderr = '';
            timeoutError.stdout = '';

            vi.mocked(execa).mockRejectedValue(timeoutError);

            await expect(service.getLogs()).rejects.toThrow('Tar command timed out after 60 seconds');
        });

        it('should handle command failure with exit code and stderr', async () => {
            vi.mocked(stat).mockImplementation((path) => {
                if (path === mockLogPath) {
                    return Promise.resolve({ isFile: () => true } as unknown as Stats);
                }
                return Promise.reject(new Error('File not found'));
            });

            const execError = new Error('Command failed') as ExecaError;
            execError.exitCode = 1;
            execError.command =
                'tar -czf /usr/local/emhttp/logs/unraid-api.tar.gz /usr/local/emhttp/logs/unraid-api';
            execError.stderr = 'tar: Cannot create archive';
            execError.stdout = '';
            execError.shortMessage = 'Command failed with exit code 1';

            vi.mocked(execa).mockRejectedValue(execError);

            await expect(service.getLogs()).rejects.toThrow('Tar command failed with exit code 1');
            await expect(service.getLogs()).rejects.toThrow('tar: Cannot create archive');
        });

        it('should handle case when tar succeeds but zip file is not created', async () => {
            vi.mocked(stat).mockImplementation((path) => {
                if (path === mockLogPath) {
                    return Promise.resolve({ isFile: () => true } as unknown as Stats);
                }
                // Zip file doesn't exist after tar command
                return Promise.reject(new Error('File not found'));
            });
            vi.mocked(execa).mockResolvedValue({
                stdout: '',
                stderr: '',
                exitCode: 0,
            } as any);

            await expect(service.getLogs()).rejects.toThrow(
                'Failed to create log zip - tar file not found after successful command'
            );
        });

        it('should throw error when log path does not exist', async () => {
            vi.mocked(stat).mockRejectedValue(new Error('File not found'));

            await expect(service.getLogs()).rejects.toThrow('No logs to download');
        });

        it('should handle generic errors', async () => {
            vi.mocked(stat).mockImplementation((path) => {
                if (path === mockLogPath) {
                    return Promise.resolve({ isFile: () => true } as unknown as Stats);
                }
                return Promise.reject(new Error('File not found'));
            });

            const genericError = new Error('Unexpected error');
            vi.mocked(execa).mockRejectedValue(genericError);

            await expect(service.getLogs()).rejects.toThrow(
                'Failed to create logs archive: Unexpected error'
            );
        });

        it('should handle errors with stdout in addition to stderr', async () => {
            vi.mocked(stat).mockImplementation((path) => {
                if (path === mockLogPath) {
                    return Promise.resolve({ isFile: () => true } as unknown as Stats);
                }
                return Promise.reject(new Error('File not found'));
            });

            const execError = new Error('Command failed') as ExecaError;
            execError.exitCode = 1;
            execError.command =
                'tar -czf /usr/local/emhttp/logs/unraid-api.tar.gz /usr/local/emhttp/logs/unraid-api';
            execError.stderr = 'tar: Error';
            execError.stdout = 'Processing archive...';
            execError.shortMessage = 'Command failed with exit code 1';

            vi.mocked(execa).mockRejectedValue(execError);

            await expect(service.getLogs()).rejects.toThrow('Stdout: Processing archive');
        });
    });

    describe('saveApiReport', () => {
        it('should generate and save API report', async () => {
            const mockReport: ApiReportData = {
                timestamp: new Date().toISOString(),
                connectionStatus: { running: 'yes' },
                system: {
                    name: 'Test Server',
                    version: '6.12.0',
                    machineId: 'test-machine-id',
                },
                connect: {
                    installed: false,
                },
                config: {
                    valid: true,
                },
                services: {
                    cloud: null,
                    minigraph: null,
                    allServices: [],
                },
            };
            const mockPath = '/test/report.json';

            vi.mocked(apiReportService.generateReport).mockResolvedValue(mockReport);
            vi.mocked(writeFile).mockResolvedValue(undefined);

            await service.saveApiReport(mockPath);

            expect(apiReportService.generateReport).toHaveBeenCalled();
            expect(writeFile).toHaveBeenCalledWith(
                mockPath,
                JSON.stringify(mockReport, null, 2),
                'utf-8'
            );
        });

        it('should handle errors when generating report', async () => {
            const mockPath = '/test/report.json';

            vi.mocked(apiReportService.generateReport).mockRejectedValue(
                new Error('Report generation failed')
            );

            // Should not throw, just log warning
            await expect(service.saveApiReport(mockPath)).resolves.toBeUndefined();
            expect(apiReportService.generateReport).toHaveBeenCalled();
        });
    });

    describe('getCustomizationPath', () => {
        it('should return banner path when type is banner', async () => {
            const mockBannerPath = '/path/to/banner.png';
            vi.mocked(getBannerPathIfPresent).mockResolvedValue(mockBannerPath);

            const result = await service.getCustomizationPath('banner');

            expect(getBannerPathIfPresent).toHaveBeenCalled();
            expect(result).toBe(mockBannerPath);
        });

        it('should return case path when type is case', async () => {
            const mockCasePath = '/path/to/case.png';
            vi.mocked(getCasePathIfPresent).mockResolvedValue(mockCasePath);

            const result = await service.getCustomizationPath('case');

            expect(getCasePathIfPresent).toHaveBeenCalled();
            expect(result).toBe(mockCasePath);
        });

        it('should return null when no banner found', async () => {
            vi.mocked(getBannerPathIfPresent).mockResolvedValue(null);

            const result = await service.getCustomizationPath('banner');

            expect(result).toBeNull();
        });

        it('should return null when no case found', async () => {
            vi.mocked(getCasePathIfPresent).mockResolvedValue(null);

            const result = await service.getCustomizationPath('case');

            expect(result).toBeNull();
        });
    });

    describe('getCustomizationStream', () => {
        it('should return read stream for banner', async () => {
            const mockPath = '/path/to/banner.png';
            const mockStream: ReadStream = Readable.from([]) as ReadStream;

            vi.mocked(getBannerPathIfPresent).mockResolvedValue(mockPath);
            vi.mocked(createReadStream).mockReturnValue(mockStream);

            const result = await service.getCustomizationStream('banner');

            expect(getBannerPathIfPresent).toHaveBeenCalled();
            expect(createReadStream).toHaveBeenCalledWith(mockPath);
            expect(result).toBe(mockStream);
        });

        it('should return read stream for case', async () => {
            const mockPath = '/path/to/case.png';
            const mockStream: ReadStream = Readable.from([]) as ReadStream;

            vi.mocked(getCasePathIfPresent).mockResolvedValue(mockPath);
            vi.mocked(createReadStream).mockReturnValue(mockStream);

            const result = await service.getCustomizationStream('case');

            expect(getCasePathIfPresent).toHaveBeenCalled();
            expect(createReadStream).toHaveBeenCalledWith(mockPath);
            expect(result).toBe(mockStream);
        });

        it('should throw error when no banner found', async () => {
            vi.mocked(getBannerPathIfPresent).mockResolvedValue(null);

            await expect(service.getCustomizationStream('banner')).rejects.toThrow('No banner found');
        });

        it('should throw error when no case found', async () => {
            vi.mocked(getCasePathIfPresent).mockResolvedValue(null);

            await expect(service.getCustomizationStream('case')).rejects.toThrow('No case found');
        });
    });
});
@@ -4,6 +4,7 @@ import { createReadStream } from 'node:fs';
import { stat, writeFile } from 'node:fs/promises';
import { join } from 'node:path';

import type { ExecaError } from 'execa';
import { execa } from 'execa';

import {
@@ -31,6 +32,8 @@ export class RestService {

    async getLogs(): Promise<ReadStream> {
        const logPath = getters.paths()['log-base'];
        const graphqlApiLog = '/var/log/graphql-api.log';

        try {
            await this.saveApiReport(join(logPath, 'report.json'));
        } catch (error) {
@@ -41,16 +44,62 @@ export class RestService {
        const logPathExists = Boolean(await stat(logPath).catch(() => null));
        if (logPathExists) {
            try {
                await execa('tar', ['-czf', zipToWrite, logPath]);
                // Build tar command arguments
                const tarArgs = ['-czf', zipToWrite, logPath];

                // Check if graphql-api.log exists and add it to the archive
                const graphqlLogExists = Boolean(await stat(graphqlApiLog).catch(() => null));
                if (graphqlLogExists) {
                    tarArgs.push(graphqlApiLog);
                    this.logger.debug('Including graphql-api.log in archive');
                }

                // Execute tar with timeout and capture output
                await execa('tar', tarArgs, {
                    timeout: 60000, // 60 seconds timeout for tar operation
                    reject: true, // Throw on non-zero exit (default behavior)
                });

                const tarFileExists = Boolean(await stat(zipToWrite).catch(() => null));

                if (tarFileExists) {
                    return createReadStream(zipToWrite);
                } else {
                    throw new Error('Failed to create log zip');
                    throw new Error(
                        'Failed to create log zip - tar file not found after successful command'
                    );
                }
            } catch (error) {
                throw new Error('Failed to create logs');
                // Build detailed error message with execa's built-in error info
                let errorMessage = 'Failed to create logs archive';

                if (error && typeof error === 'object' && 'command' in error) {
                    const execaError = error as ExecaError;

                    if (execaError.timedOut) {
                        errorMessage = `Tar command timed out after 60 seconds. Command: ${execaError.command}`;
                    } else if (execaError.exitCode !== undefined) {
                        errorMessage = `Tar command failed with exit code ${execaError.exitCode}. Command: ${execaError.command}`;
                    }

                    // Add stderr/stdout if available
                    if (execaError.stderr) {
                        errorMessage += `. Stderr: ${execaError.stderr}`;
                    }
                    if (execaError.stdout) {
                        errorMessage += `. Stdout: ${execaError.stdout}`;
                    }

                    // Include the short message from execa
                    if (execaError.shortMessage) {
                        errorMessage += `. Details: ${execaError.shortMessage}`;
                    }
                } else if (error instanceof Error) {
                    errorMessage += `: ${error.message}`;
                }

                this.logger.error(errorMessage, error);
                throw new Error(errorMessage);
            }
        } else {
            throw new Error('No logs to download');

@@ -1,6 +1,9 @@
import { readFile } from 'node:fs/promises';

import { FileModification } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
import {
    FileModification,
    ShouldApplyWithReason,
} from '@app/unraid-api/unraid-file-modifier/file-modification.js';

export default class DisplaySettingsModification extends FileModification {
    id: string = 'display-settings';
@@ -34,4 +37,15 @@ export default class DisplaySettingsModification extends FileModification {

        return this.createPatchWithDiff(overridePath ?? this.filePath, fileContent, newContent);
    }

    async shouldApply(): Promise<ShouldApplyWithReason> {
        const superShouldApply = await super.shouldApply();
        if (!superShouldApply.shouldApply) {
            return superShouldApply;
        }
        return {
            shouldApply: true,
            reason: 'Display settings modification needed for Unraid version <= 7.2.0-beta.2.3',
        };
    }
}
28  api/src/unraid-api/utils/feature-flag.helper.ts  Normal file
@@ -0,0 +1,28 @@
import { ForbiddenException } from '@nestjs/common';

/**
 * Checks if a feature flag is enabled and throws an exception if disabled.
 * Use this at the beginning of resolver methods for immediate feature flag checks.
 *
 * @example
 * ```typescript
 * @ResolveField(() => String)
 * async organizer() {
 *     checkFeatureFlag(FeatureFlags, 'ENABLE_NEXT_DOCKER_RELEASE');
 *     return this.dockerOrganizerService.resolveOrganizer();
 * }
 * ```
 *
 * @param flags - The feature flag object containing boolean/truthy values
 * @param key - The key within the feature flag object to check
 * @throws ForbiddenException if the feature flag is disabled
 */
export function checkFeatureFlag<T extends Record<string, any>>(flags: T, key: keyof T): void {
    const isEnabled = Boolean(flags[key]);

    if (!isEnabled) {
        throw new ForbiddenException(
            `Feature "${String(key)}" is currently disabled. This functionality is not available at this time.`
        );
    }
}
332  api/src/unraid-api/utils/graphql-field-helper.spec.ts  Normal file
@@ -0,0 +1,332 @@
import { buildSchema, FieldNode, GraphQLResolveInfo, parse } from 'graphql';
import { describe, expect, it } from 'vitest';

import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';

describe('GraphQLFieldHelper', () => {
    const schema = buildSchema(`
        type User {
            id: String
            name: String
            email: String
            profile: Profile
            posts: [Post]
            settings: Settings
        }

        type Profile {
            avatar: String
            bio: String
        }

        type Post {
            title: String
            content: String
        }

        type Settings {
            theme: String
            language: String
        }

        type Query {
            user: User
            users: [User]
        }
    `);

    const createMockInfo = (query: string): GraphQLResolveInfo => {
        const document = parse(query);
        const operation = document.definitions[0] as any;
        const fieldNode = operation.selectionSet.selections[0] as FieldNode;

        return {
            fieldName: fieldNode.name.value,
            fieldNodes: [fieldNode],
            returnType: schema.getType('User') as any,
            parentType: schema.getType('Query') as any,
            path: { prev: undefined, key: fieldNode.name.value, typename: 'Query' },
            schema,
            fragments: {},
            rootValue: {},
            operation,
            variableValues: {},
        } as GraphQLResolveInfo;
    };

    describe('getRequestedFields', () => {
        it('should return flat fields structure', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                        email
                    }
                }
            `);

            const fields = GraphQLFieldHelper.getRequestedFields(mockInfo);

            expect(fields).toEqual({
                id: {},
                name: {},
                email: {},
            });
        });

        it('should return nested fields structure', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        profile {
                            avatar
                            bio
                        }
                        settings {
                            theme
                            language
                        }
                    }
                }
            `);

            const fields = GraphQLFieldHelper.getRequestedFields(mockInfo);

            expect(fields).toEqual({
                id: {},
                profile: {
                    avatar: {},
                    bio: {},
                },
                settings: {
                    theme: {},
                    language: {},
                },
            });
        });
    });

    describe('isFieldRequested', () => {
        it('should return true for requested top-level field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                        email
                    }
                }
            `);

            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'id')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'name')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'email')).toBe(true);
        });

        it('should return false for non-requested field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'email')).toBe(false);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile')).toBe(false);
        });

        it('should handle nested field paths', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                        }
                    }
                }
            `);

            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile.avatar')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile.bio')).toBe(false);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'settings')).toBe(false);
        });
    });

    describe('getRequestedFieldsList', () => {
        it('should return list of top-level field names', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                        email
                        profile {
                            avatar
                        }
                    }
                }
            `);

            const fieldsList = GraphQLFieldHelper.getRequestedFieldsList(mockInfo);

            expect(fieldsList).toEqual(['id', 'name', 'email', 'profile']);
        });

        it('should return empty array for no fields', () => {
            const mockInfo = createMockInfo(`
                query {
                    user
                }
            `);

            const fieldsList = GraphQLFieldHelper.getRequestedFieldsList(mockInfo);

            expect(fieldsList).toEqual([]);
        });
    });

    describe('hasNestedFields', () => {
        it('should return true when field has nested selections', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                            bio
                        }
                    }
                }
            `);

            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'profile')).toBe(true);
        });

        it('should return false when field has no nested selections', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'id')).toBe(false);
            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'name')).toBe(false);
        });

        it('should return false for non-existent field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                    }
                }
            `);

            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'profile')).toBe(false);
        });
    });

    describe('getNestedFields', () => {
        it('should return nested fields object', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                            bio
                        }
                    }
                }
            `);

            const nestedFields = GraphQLFieldHelper.getNestedFields(mockInfo, 'profile');

            expect(nestedFields).toEqual({
                avatar: {},
                bio: {},
            });
        });

        it('should return null for field without nested selections', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.getNestedFields(mockInfo, 'id')).toBeNull();
            expect(GraphQLFieldHelper.getNestedFields(mockInfo, 'name')).toBeNull();
        });

        it('should return null for non-existent field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                    }
                }
            `);

            expect(GraphQLFieldHelper.getNestedFields(mockInfo, 'profile')).toBeNull();
        });
    });

    describe('shouldFetchRelation', () => {
        it('should return true when relation is requested with nested fields', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                        }
                        posts {
                            title
                            content
                        }
                    }
                }
            `);

            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'profile')).toBe(true);
            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'posts')).toBe(true);
        });

        it('should return false when relation has no nested fields', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'id')).toBe(false);
            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'name')).toBe(false);
        });

        it('should return false when relation is not requested', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'profile')).toBe(false);
            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'posts')).toBe(false);
        });
    });
});
63  api/src/unraid-api/utils/graphql-field-helper.ts  Normal file
@@ -0,0 +1,63 @@
import type { GraphQLResolveInfo } from 'graphql';
import graphqlFields from 'graphql-fields';

export interface RequestedFields {
    [key: string]: RequestedFields | {};
}

export interface GraphQLFieldOptions {
    processArguments?: boolean;
    excludedFields?: string[];
}

export class GraphQLFieldHelper {
    static getRequestedFields(info: GraphQLResolveInfo, options?: GraphQLFieldOptions): RequestedFields {
        return graphqlFields(info, {}, options);
    }

    static isFieldRequested(info: GraphQLResolveInfo, fieldPath: string): boolean {
        const fields = this.getRequestedFields(info);
        const pathParts = fieldPath.split('.');

        let current: RequestedFields | {} = fields;
        for (const part of pathParts) {
            if (!(part in current)) {
                return false;
            }
            current = current[part as keyof typeof current] as RequestedFields | {};
        }

        return true;
    }

    static getRequestedFieldsList(info: GraphQLResolveInfo): string[] {
        const fields = this.getRequestedFields(info);
        return Object.keys(fields);
    }

    static hasNestedFields(info: GraphQLResolveInfo, fieldName: string): boolean {
        const fields = this.getRequestedFields(info);
        const field = fields[fieldName];
        return field !== undefined && Object.keys(field).length > 0;
    }

    static getNestedFields(info: GraphQLResolveInfo, fieldName: string): RequestedFields | null {
        const fields = this.getRequestedFields(info);
        const field = fields[fieldName];

        if (!field || typeof field !== 'object') {
            return null;
        }

        // graphql-fields returns {} for fields without nested selections
        if (Object.keys(field).length === 0) {
            return null;
        }

        return field as RequestedFields;
    }

    static shouldFetchRelation(info: GraphQLResolveInfo, relationName: string): boolean {
        return this.isFieldRequested(info, relationName) && this.hasNestedFields(info, relationName);
    }
}
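
For context, a hedged sketch of how this helper might be used inside a NestJS resolver to skip an expensive relation lookup; UserResolver, UserService, PostsService, and User are illustrative names, not part of this diff:

import { Info, Query, Resolver } from '@nestjs/graphql';
import type { GraphQLResolveInfo } from 'graphql';
import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';

@Resolver()
export class UserResolver {
    constructor(
        private readonly userService: UserService, // hypothetical services
        private readonly postsService: PostsService
    ) {}

    @Query(() => User)
    async user(@Info() info: GraphQLResolveInfo) {
        const user = await this.userService.findCurrent();
        // Only hit the posts store when the client actually selected posts { ... }
        if (GraphQLFieldHelper.shouldFetchRelation(info, 'posts')) {
            user.posts = await this.postsService.findForUser(user.id);
        }
        return user;
    }
}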
@@ -1,3 +1,6 @@
import { existsSync, readFileSync } from 'node:fs';
import { basename, join } from 'node:path';

import type { ViteUserConfig } from 'vitest/config';
import { viteCommonjs } from '@originjs/vite-plugin-commonjs';
import nodeResolve from '@rollup/plugin-node-resolve';
@@ -70,6 +73,29 @@ export default defineConfig(({ mode }): ViteUserConfig => {
            },
        },
    }),
    // Copy PHP files to assets directory
    {
        name: 'copy-php-files',
        buildStart() {
            const phpFiles = ['src/core/utils/plugins/wrapper.php'];
            phpFiles.forEach((file) => this.addWatchFile(file));
        },
        async generateBundle() {
            const phpFiles = ['src/core/utils/plugins/wrapper.php'];
            phpFiles.forEach((file) => {
                if (!existsSync(file)) {
                    this.warn(`[copy-php-files] PHP file ${file} does not exist`);
                    return;
                }
                const content = readFileSync(file);
                this.emitFile({
                    type: 'asset',
                    fileName: join('assets', basename(file)),
                    source: content,
                });
            });
        },
    },
],
define: {
    // Allows vite to preserve process.env variables and not hardcode them

14  package.json
@@ -1,10 +1,10 @@
{
    "name": "unraid-monorepo",
    "private": true,
    "version": "4.20.1",
    "version": "4.22.2",
    "scripts": {
        "build": "pnpm -r build",
        "build:watch": " pnpm -r --parallel build:watch",
        "build:watch": "pnpm -r --parallel --filter '!@unraid/ui' build:watch",
        "codegen": "pnpm -r codegen",
        "dev": "pnpm -r dev",
        "unraid:deploy": "pnpm -r unraid:deploy",
@@ -63,8 +63,14 @@
        "pre-commit": "pnpm lint-staged"
    },
    "lint-staged": {
        "*.{js,jsx,ts,tsx,vue}": [
            "pnpm lint:fix"
        "api/**/*.{js,ts}": [
            "pnpm --filter api lint:fix"
        ],
        "web/**/*.{js,ts,tsx,vue}": [
            "pnpm --filter web lint:fix"
        ],
        "unraid-ui/**/*.{js,ts,tsx,vue}": [
            "pnpm --filter @unraid/ui lint:fix"
        ]
    },
    "packageManager": "pnpm@10.15.0"

@@ -17,6 +17,7 @@ const config: CodegenConfig = {
        URL: 'URL',
        Port: 'number',
        UUID: 'string',
        BigInt: 'number',
    },
    scalarSchemas: {
        URL: 'z.instanceof(URL)',
@@ -24,6 +25,7 @@ const config: CodegenConfig = {
        JSON: 'z.record(z.string(), z.any())',
        Port: 'z.number()',
        UUID: 'z.string()',
        BigInt: 'z.number()',
    },
},
generates: {

@@ -25,8 +25,8 @@
    "description": "Unraid Connect plugin for Unraid API",
    "devDependencies": {
        "@apollo/client": "3.14.0",
        "@faker-js/faker": "9.9.0",
        "@graphql-codegen/cli": "5.0.7",
        "@faker-js/faker": "10.0.0",
        "@graphql-codegen/cli": "6.0.0",
        "@graphql-typed-document-node/core": "3.2.0",
        "@ianvs/prettier-plugin-sort-imports": "4.6.3",
        "@jsonforms/core": "3.6.0",
@@ -43,7 +43,7 @@
        "@types/lodash-es": "4.17.12",
        "@types/node": "22.18.0",
        "@types/ws": "8.18.1",
        "camelcase-keys": "9.1.3",
        "camelcase-keys": "10.0.0",
        "class-transformer": "0.5.1",
        "class-validator": "0.14.2",
        "execa": "9.6.0",
@@ -60,7 +60,7 @@
        "prettier": "3.6.2",
        "rimraf": "6.0.1",
        "rxjs": "7.8.2",
        "type-fest": "4.41.0",
        "type-fest": "5.0.0",
        "typescript": "5.9.2",
        "undici": "7.15.0",
        "vitest": "3.2.4",
@@ -84,7 +84,7 @@
        "@nestjs/graphql": "13.1.0",
        "@nestjs/schedule": "6.0.0",
        "@runonflux/nat-upnp": "1.0.2",
        "camelcase-keys": "9.1.3",
        "camelcase-keys": "10.0.0",
        "class-transformer": "0.5.1",
        "class-validator": "0.14.2",
        "execa": "9.6.0",

@@ -731,10 +731,17 @@ export type RemoteGraphQlEventFragmentFragment = { __typename?: 'RemoteGraphQLEv
export type EventsSubscriptionVariables = Exact<{ [key: string]: never; }>;


export type EventsSubscription = { __typename?: 'Subscription', events?: Array<{ __typename: 'ClientConnectedEvent', connectedEvent: EventType, connectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientDisconnectedEvent', disconnectedEvent: EventType, disconnectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientPingEvent' } | { __typename: 'RemoteAccessEvent' } | (
    { __typename: 'RemoteGraphQLEvent' }
    & { ' $fragmentRefs'?: { 'RemoteGraphQlEventFragmentFragment': RemoteGraphQlEventFragmentFragment } }
  ) | { __typename: 'UpdateEvent' }> | null };
export type EventsSubscription = { __typename?: 'Subscription', events?: Array<
  | { __typename: 'ClientConnectedEvent', connectedEvent: EventType, connectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } }
  | { __typename: 'ClientDisconnectedEvent', disconnectedEvent: EventType, disconnectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } }
  | { __typename: 'ClientPingEvent' }
  | { __typename: 'RemoteAccessEvent' }
  | (
    { __typename: 'RemoteGraphQLEvent' }
    & { ' $fragmentRefs'?: { 'RemoteGraphQlEventFragmentFragment': RemoteGraphQlEventFragmentFragment } }
  )
  | { __typename: 'UpdateEvent' }
> | null };

export type SendRemoteGraphQlResponseMutationVariables = Exact<{
    input: RemoteGraphQlServerInput;

@@ -46,7 +46,7 @@
    "nest-authz": "2.17.0",
    "pify": "6.1.0",
    "rimraf": "6.0.1",
    "type-fest": "4.41.0",
    "type-fest": "5.0.0",
    "typescript": "5.9.2",
    "vitest": "3.2.4",
    "ws": "8.18.3"
295  packages/unraid-shared/src/util/__tests__/processing.test.ts  Normal file
@@ -0,0 +1,295 @@
import { describe, it, expect, vi } from 'vitest';
import { AsyncMutex } from '../processing.js';

describe('AsyncMutex', () => {

    describe('constructor-based operation', () => {
        it('should execute the default operation when do() is called without parameters', async () => {
            const mockOperation = vi.fn().mockResolvedValue('result');
            const mutex = new AsyncMutex(mockOperation);

            const result = await mutex.do();

            expect(result).toBe('result');
            expect(mockOperation).toHaveBeenCalledTimes(1);
        });

        it('should return the same promise when multiple calls are made concurrently', async () => {
            let resolveOperation: (value: string) => void;
            const operationPromise = new Promise<string>((resolve) => {
                resolveOperation = resolve;
            });
            const mockOperation = vi.fn().mockReturnValue(operationPromise);
            const mutex = new AsyncMutex(mockOperation);

            const promise1 = mutex.do();
            const promise2 = mutex.do();
            const promise3 = mutex.do();

            expect(mockOperation).toHaveBeenCalledTimes(1);
            expect(promise1).toBe(promise2);
            expect(promise2).toBe(promise3);

            resolveOperation!('result');
            const [result1, result2, result3] = await Promise.all([promise1, promise2, promise3]);

            expect(result1).toBe('result');
            expect(result2).toBe('result');
            expect(result3).toBe('result');
        });

        it('should allow new operations after the first completes', async () => {
            const mockOperation = vi.fn()
                .mockResolvedValueOnce('first')
                .mockResolvedValueOnce('second');
            const mutex = new AsyncMutex(mockOperation);

            const result1 = await mutex.do();
            expect(result1).toBe('first');
            expect(mockOperation).toHaveBeenCalledTimes(1);

            const result2 = await mutex.do();
            expect(result2).toBe('second');
            expect(mockOperation).toHaveBeenCalledTimes(2);
        });

        it('should handle errors in the default operation', async () => {
            const error = new Error('Operation failed');
            const mockOperation = vi.fn().mockRejectedValue(error);
            const mutex = new AsyncMutex(mockOperation);

            await expect(mutex.do()).rejects.toThrow(error);
            expect(mockOperation).toHaveBeenCalledTimes(1);

            const secondOperation = vi.fn().mockResolvedValue('success');
            const mutex2 = new AsyncMutex(secondOperation);
            const result = await mutex2.do();
            expect(result).toBe('success');
        });
    });

    describe('per-call operation', () => {
        it('should execute the provided operation', async () => {
            const mutex = new AsyncMutex<number>();
            const mockOperation = vi.fn().mockResolvedValue(42);

            const result = await mutex.do(mockOperation);

            expect(result).toBe(42);
            expect(mockOperation).toHaveBeenCalledTimes(1);
        });

        it('should return the same promise for concurrent calls with same operation type', async () => {
            const mutex = new AsyncMutex();
            let resolveOperation: (value: string) => void;
            const operationPromise = new Promise<string>((resolve) => {
                resolveOperation = resolve;
            });
            const mockOperation = vi.fn().mockReturnValue(operationPromise);

            const promise1 = mutex.do(mockOperation);
            const promise2 = mutex.do(mockOperation);
            const promise3 = mutex.do(mockOperation);

            expect(mockOperation).toHaveBeenCalledTimes(1);
            expect(promise1).toBe(promise2);
            expect(promise2).toBe(promise3);

            resolveOperation!('shared-result');
            const [result1, result2, result3] = await Promise.all([promise1, promise2, promise3]);

            expect(result1).toBe('shared-result');
            expect(result2).toBe('shared-result');
            expect(result3).toBe('shared-result');
        });

        it('should allow different operations with different types', async () => {
            const mutex = new AsyncMutex();

            const stringOp = vi.fn().mockResolvedValue('string-result');
            const numberOp = vi.fn().mockResolvedValue(123);

            const stringResult = await mutex.do(stringOp);
            const numberResult = await mutex.do(numberOp);

            expect(stringResult).toBe('string-result');
            expect(numberResult).toBe(123);
            expect(stringOp).toHaveBeenCalledTimes(1);
            expect(numberOp).toHaveBeenCalledTimes(1);
        });

        it('should handle errors in per-call operations', async () => {
            const mutex = new AsyncMutex();
            const error = new Error('Operation failed');
            const failingOp = vi.fn().mockRejectedValue(error);

            await expect(mutex.do(failingOp)).rejects.toThrow(error);
            expect(failingOp).toHaveBeenCalledTimes(1);

            const successOp = vi.fn().mockResolvedValue('success');
            const result = await mutex.do(successOp);
            expect(result).toBe('success');
            expect(successOp).toHaveBeenCalledTimes(1);
        });

        it('should throw an error when no operation is provided and no default is set', async () => {
            const mutex = new AsyncMutex();

            await expect(mutex.do()).rejects.toThrow('No operation provided and no default operation set');
        });
    });

    describe('mixed usage', () => {
        it('should allow overriding default operation with per-call operation', async () => {
            const defaultOp = vi.fn().mockResolvedValue('default');
            const mutex = new AsyncMutex(defaultOp);

            const customOp = vi.fn().mockResolvedValue('custom');

            const customResult = await mutex.do(customOp);
            expect(customResult).toBe('custom');
            expect(customOp).toHaveBeenCalledTimes(1);
            expect(defaultOp).not.toHaveBeenCalled();

            const defaultResult = await mutex.do();
            expect(defaultResult).toBe('default');
            expect(defaultOp).toHaveBeenCalledTimes(1);
        });

        it('should share lock between default and custom operations', async () => {
            let resolveDefault: (value: string) => void;
            const defaultPromise = new Promise<string>((resolve) => {
                resolveDefault = resolve;
            });
            const defaultOp = vi.fn().mockReturnValue(defaultPromise);
            const mutex = new AsyncMutex(defaultOp);

            const customOp = vi.fn().mockResolvedValue('custom');

            const defaultCall = mutex.do();
            const customCall = mutex.do(customOp);

            expect(defaultOp).toHaveBeenCalledTimes(1);
            expect(customOp).not.toHaveBeenCalled();
            expect(customCall).toBe(defaultCall);

            resolveDefault!('default');
            const [defaultResult, customResult] = await Promise.all([defaultCall, customCall]);

            expect(defaultResult).toBe('default');
            expect(customResult).toBe('default');
        });
    });

    describe('timing and concurrency', () => {
        it('should handle sequential slow operations', async () => {
            const mutex = new AsyncMutex();
            let callCount = 0;

            const slowOp = vi.fn().mockImplementation(() => {
                return new Promise((resolve) => {
                    const currentCall = ++callCount;
                    setTimeout(() => resolve(`result-${currentCall}`), 100);
                });
            });

            const result1 = await mutex.do(slowOp);
            expect(result1).toBe('result-1');

            const result2 = await mutex.do(slowOp);
            expect(result2).toBe('result-2');

            expect(slowOp).toHaveBeenCalledTimes(2);
        });

        it('should deduplicate concurrent slow operations', async () => {
            const mutex = new AsyncMutex();
            let resolveOperation: (value: string) => void;

            const slowOp = vi.fn().mockImplementation(() => {
                return new Promise<string>((resolve) => {
                    resolveOperation = resolve;
                });
            });

            const promises = [
                mutex.do(slowOp),
                mutex.do(slowOp),
                mutex.do(slowOp),
                mutex.do(slowOp),
                mutex.do(slowOp)
            ];

            expect(slowOp).toHaveBeenCalledTimes(1);

            resolveOperation!('shared-slow-result');
            const results = await Promise.all(promises);

            expect(results).toEqual([
                'shared-slow-result',
                'shared-slow-result',
                'shared-slow-result',
                'shared-slow-result',
                'shared-slow-result'
            ]);
        });

        it('should properly clean up after operation completes', async () => {
            const mutex = new AsyncMutex();
            const op1 = vi.fn().mockResolvedValue('first');
            const op2 = vi.fn().mockResolvedValue('second');

            await mutex.do(op1);
            expect(op1).toHaveBeenCalledTimes(1);

            await mutex.do(op2);
            expect(op2).toHaveBeenCalledTimes(1);
        });

        it('should handle multiple rapid sequences of operations', async () => {
            const mutex = new AsyncMutex();
            const results: string[] = [];

            for (let i = 0; i < 5; i++) {
                const op = vi.fn().mockResolvedValue(`result-${i}`);
                const result = await mutex.do(op);
                results.push(result as string);
            }

            expect(results).toEqual(['result-0', 'result-1', 'result-2', 'result-3', 'result-4']);
        });
    });

    describe('edge cases', () => {
        it('should handle operations that return undefined', async () => {
            const mutex = new AsyncMutex<undefined>();
            const op = vi.fn().mockResolvedValue(undefined);

            const result = await mutex.do(op);
            expect(result).toBeUndefined();
            expect(op).toHaveBeenCalledTimes(1);
        });

        it('should handle operations that return null', async () => {
            const mutex = new AsyncMutex<null>();
            const op = vi.fn().mockResolvedValue(null);

            const result = await mutex.do(op);
            expect(result).toBeNull();
            expect(op).toHaveBeenCalledTimes(1);
        });

        it('should handle nested operations correctly', async () => {
            const mutex = new AsyncMutex<string>();

            const innerOp = vi.fn().mockResolvedValue('inner');
            const outerOp = vi.fn().mockImplementation(async () => {
                return 'outer';
            });

            const result = await mutex.do(outerOp);
            expect(result).toBe('outer');
            expect(outerOp).toHaveBeenCalledTimes(1);
        });
    });
});
@@ -31,3 +31,119 @@ export function makeSafeRunner(onError: (error: unknown) => void) {
     }
   };
 }
+
+type AsyncOperation<T> = () => Promise<T>;
+
+/**
+ * A mutex for asynchronous operations that ensures only one operation runs at a time.
+ *
+ * When multiple callers attempt to execute operations simultaneously, they will all
+ * receive the same promise from the currently running operation, effectively deduplicating
+ * concurrent calls. This is useful for expensive operations like API calls, file operations,
+ * or database queries that should not be executed multiple times concurrently.
+ *
+ * @template T - The default return type for operations when using a default operation
+ *
+ * @example
+ * // Basic usage with explicit operations
+ * const mutex = new AsyncMutex();
+ *
+ * // Multiple concurrent calls will deduplicate
+ * const [result1, result2, result3] = await Promise.all([
+ *   mutex.do(() => fetch('/api/data')),
+ *   mutex.do(() => fetch('/api/data')), // Same request, will get same promise
+ *   mutex.do(() => fetch('/api/data'))  // Same request, will get same promise
+ * ]);
+ * // Only one fetch actually happens
+ *
+ * @example
+ * // Usage with a default operation
+ * const dataLoader = new AsyncMutex(() =>
+ *   fetch('/api/expensive-data').then(res => res.json())
+ * );
+ *
+ * const data1 = await dataLoader.do(); // Executes the fetch
+ * const data2 = await dataLoader.do(); // If the first promise is finished, a new fetch is executed
+ */
+export class AsyncMutex<T = unknown> {
+  private currentOperation: Promise<T> | null = null;
+  private defaultOperation?: AsyncOperation<T>;
+
+  /**
+   * Creates a new AsyncMutex instance.
+   *
+   * @param operation - Optional default operation to execute when calling `do()` without arguments.
+   *                    This is useful when you have a specific operation that should be deduplicated.
+   *
+   * @example
+   * // Without default operation (shared mutex)
+   * const mutex = new AsyncMutex();
+   * const promise1 = mutex.do(() => someAsyncWork());
+   * const promise2 = mutex.do(() => someOtherAsyncWork());
+   *
+   * // Both promises will be the same
+   * expect(await promise1).toBe(await promise2);
+   *
+   * // After the first operation completes, new operations can run
+   * await promise1;
+   * const newPromise = mutex.do(() => someOtherAsyncWork()); // This will execute
+   *
+   * @example
+   * // With default operation (deduplicating a specific operation)
+   * const dataMutex = new AsyncMutex(() => loadExpensiveData());
+   * await dataMutex.do(); // Executes loadExpensiveData()
+   */
+  constructor(operation?: AsyncOperation<T>) {
+    this.defaultOperation = operation;
+  }
+
+  /**
+   * Executes the provided operation, ensuring only one runs at a time.
+   *
+   * If an operation is already running, all subsequent calls will receive
+   * the same promise from the currently running operation. This effectively
+   * deduplicates concurrent calls to the same expensive operation.
+   *
+   * @param operation - Optional operation to execute. If not provided, uses the default operation.
+   * @returns Promise that resolves with the result of the operation
+   * @throws Error if no operation is provided and no default operation was set
+   *
+   * @example
+   * const mutex = new AsyncMutex();
+   *
+   * // These will all return the same promise
+   * const promise1 = mutex.do(() => fetch('/api/data'));
+   * const promise2 = mutex.do(() => fetch('/api/other'));   // Still gets first promise!
+   * const promise3 = mutex.do(() => fetch('/api/another')); // Still gets first promise!
+   *
+   * // After the first operation completes, new operations can run
+   * await promise1;
+   * const newPromise = mutex.do(() => fetch('/api/new')); // This will execute
+   */
+  do(operation?: AsyncOperation<T>): Promise<T> {
+    if (this.currentOperation) {
+      return this.currentOperation;
+    }
+    const op = operation ?? this.defaultOperation;
+    if (!op) {
+      return Promise.reject(
+        new Error("No operation provided and no default operation set")
+      );
+    }
+    const safeOp = () => {
+      try {
+        return op();
+      } catch (error) {
+        return Promise.reject(error);
+      }
+    };
+
+    const promise = safeOp().finally(() => {
+      if (this.currentOperation === promise) {
+        this.currentOperation = null;
+      }
+    });
+    this.currentOperation = promise;
+    return promise;
+  }
+}
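
As a usage illustration, here is a minimal sketch of the deduplication behavior (the import path is hypothetical; the class is the one added above):

```ts
import { AsyncMutex } from './async-mutex'; // hypothetical path

async function main() {
  let calls = 0;
  const mutex = new AsyncMutex<number>();
  const slowOp = async () => {
    calls++;
    await new Promise((resolve) => setTimeout(resolve, 50));
    return calls;
  };

  // Three concurrent callers share the single in-flight promise.
  const [a, b, c] = await Promise.all([mutex.do(slowOp), mutex.do(slowOp), mutex.do(slowOp)]);
  console.log(a, b, c, calls); // 1 1 1 1

  // Once the first promise settles, a new call runs the operation again.
  console.log(await mutex.do(slowOp), calls); // 2 2
}

main();
```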

plugin/builder/utils/changelog.test.ts (new file, 181 lines)
@@ -0,0 +1,181 @@
import { describe, it, expect, beforeAll } from "vitest";
import { execSync } from "child_process";
import { getStagingChangelogFromGit } from "./changelog.js";

describe.sequential("getStagingChangelogFromGit", () => {
  let currentCommitMessage: string | null = null;

  beforeAll(() => {
    // Get the current commit message to validate it appears in the changelog
    try {
      currentCommitMessage = execSync('git log -1 --pretty=%s', { encoding: 'utf8' }).trim();
    } catch {
      // Ignore if we can't get the commit
    }
  });

  it("should generate changelog header with version", { timeout: 20000 }, async () => {
    const result = await getStagingChangelogFromGit({
      pluginVersion: "99.99.99",
      tag: undefined as any,
    });

    expect(result).toBeDefined();
    expect(typeof result).toBe("string");
    // Should contain the version header
    expect(result).toContain("99.99.99");
    // Should have markdown header formatting
    expect(result).toMatch(/##\s+/);
  });

  it("should generate changelog with tag parameter", { timeout: 20000 }, async () => {
    // When a tag is provided, the changelog header should include it
    const result = await getStagingChangelogFromGit({
      pluginVersion: "99.99.99",
      tag: "test-tag-99",
    });

    expect(result).toBeDefined();
    expect(typeof result).toBe("string");
    expect(result).toContain("test-tag-99");

    // Should have a version header
    expect(result).toMatch(/##\s+/);

    // IMPORTANT: Verify that actual commits are included in the changelog.
    // This ensures the gitRawCommitsOpts is working correctly.
    // The changelog should include commits if there are any between origin/main and HEAD.
    // We check for common changelog patterns that indicate actual content.
    if (result.length > 100) {
      // If we have a substantial changelog, it should contain commit information
      expect(
        result.includes("### Features") ||
          result.includes("### Bug Fixes") ||
          result.includes("### ") ||
          result.includes("* ") // Commit entries typically start with an asterisk
      ).toBe(true);
    }
  });

  it("should handle error gracefully and return tag", { timeout: 20000 }, async () => {
    // The function catches errors and returns the tag.
    // An empty version might not cause an error, so just verify
    // the function completes without throwing.
    const result = await getStagingChangelogFromGit({
      pluginVersion: "test-version",
      tag: "fallback-tag",
    });

    expect(result).toBeDefined();
    expect(typeof result).toBe("string");
    // Should either return a changelog or the fallback tag
    expect(result.length).toBeGreaterThan(0);
  });

  it("should use conventional-changelog v7 API correctly", { timeout: 20000 }, async () => {
    // This test validates that the v7 API is being called correctly
    // by checking that the function executes without throwing.
    let error: any = null;

    try {
      await getStagingChangelogFromGit({
        pluginVersion: "99.99.99",
        tag: undefined as any,
      });
    } catch (e) {
      error = e;
    }

    // The v7 API should work without errors
    expect(error).toBeNull();
  });

  it("should validate changelog structure", { timeout: 20000 }, async () => {
    // Use a high version number to avoid conflicts with real releases
    const result = await getStagingChangelogFromGit({
      pluginVersion: "999.0.0",
      tag: "v999-test",
    });

    expect(result).toBeDefined();
    expect(typeof result).toBe("string");

    // Verify basic markdown structure
    if (result.length > 50) {
      // Should have the tag in the header when a tag is provided
      expect(result).toMatch(/##\s+\[?v999-test/);
      // Should be valid markdown with proper line breaks
      expect(result).toMatch(/\n/);
    }
  });

  it("should include actual commits when using gitRawCommitsOpts with tag", { timeout: 20000 }, async () => {
    // This test ensures that gitRawCommitsOpts is working correctly
    // and actually fetching commits between origin/main and HEAD.
    const result = await getStagingChangelogFromGit({
      pluginVersion: "99.99.99",
      tag: "CI-TEST",
    });

    expect(result).toBeDefined();
    expect(typeof result).toBe("string");

    // The header should contain the tag
    expect(result).toContain("CI-TEST");

    // Critical: the changelog should NOT be just the tag (error fallback)
    expect(result).not.toBe("CI-TEST");

    // The changelog should have a proper markdown header
    expect(result).toMatch(/^##\s+/);

    // Check if we're in a git repo with commits ahead of the base branch
    let commitCount = 0;
    try {
      // Try to detect the base branch (same logic as in changelog.ts)
      let baseBranch = "origin/main";
      try {
        const originHead = execSync("git symbolic-ref refs/remotes/origin/HEAD 2>/dev/null", {
          encoding: "utf8",
          stdio: ["ignore", "pipe", "ignore"],
        }).trim();
        if (originHead) {
          baseBranch = originHead.replace("refs/remotes/", "");
        }
      } catch {
        // Try common branches
        const branches = ["origin/main", "origin/master", "origin/develop"];
        for (const branch of branches) {
          try {
            execSync(`git rev-parse --verify ${branch} 2>/dev/null`, { stdio: "ignore" });
            baseBranch = branch;
            break;
          } catch {
            // Continue to next branch
          }
        }
      }
      commitCount = parseInt(execSync(`git rev-list --count ${baseBranch}..HEAD`, { encoding: "utf8" }).trim());
    } catch {
      // If we can't determine, we'll check for minimal content
    }

    // If there are commits on this branch, the changelog MUST include them
    if (commitCount > 0) {
      // The changelog must be more than just a header;
      // a minimal header is "## CI-TEST (2025-09-12)\n\n", which is ~30 chars
      expect(result.length).toBeGreaterThan(50);

      // Should have actual commit content
      const hasCommitContent =
        result.includes("### ") || // Section headers like ### Features
        result.includes("* ") || // Commit bullet points
        result.includes("- "); // Alternative bullet style

      if (!hasCommitContent) {
        throw new Error(`Expected changelog to contain commits but got only: ${result.substring(0, 100)}...`);
      }
      expect(hasCommitContent).toBe(true);
    }
  });
});
@@ -1,40 +1,167 @@
-import conventionalChangelog from "conventional-changelog";
+import { ConventionalChangelog } from "conventional-changelog";
+import { execSync } from "child_process";
 
-import { PluginEnv } from "../cli/setup-plugin-environment";
+import { PluginEnv } from "../cli/setup-plugin-environment.js";
 
+/**
+ * Detects the base branch and finds the merge base for PR changelog generation.
+ * Returns the merge-base commit to only show commits from the current PR.
+ */
+function getMergeBase(): string | null {
+  try {
+    // First, find the base branch
+    let baseBranch: string | null = null;
+
+    // Try to get the default branch from origin/HEAD
+    try {
+      const originHead = execSync("git symbolic-ref refs/remotes/origin/HEAD 2>/dev/null", {
+        encoding: "utf8",
+        stdio: ["ignore", "pipe", "ignore"]
+      }).trim();
+      if (originHead) {
+        baseBranch = originHead.replace("refs/remotes/", "");
+      }
+    } catch {
+      // origin/HEAD not set, continue to next strategy
+    }
+
+    // Try common default branch names if origin/HEAD didn't work
+    if (!baseBranch) {
+      const commonBranches = ["origin/main", "origin/master", "origin/develop"];
+      for (const branch of commonBranches) {
+        try {
+          execSync(`git rev-parse --verify ${branch} 2>/dev/null`, { stdio: "ignore" });
+          baseBranch = branch;
+          break;
+        } catch {
+          // Branch doesn't exist, try next
+        }
+      }
+    }
+
+    if (!baseBranch) {
+      return null;
+    }
+
+    // Find the merge-base between the current branch and the base branch.
+    // This gives us the commit where the PR branch diverged from main.
+    try {
+      const mergeBase = execSync(`git merge-base ${baseBranch} HEAD`, {
+        encoding: "utf8",
+        stdio: ["ignore", "pipe", "ignore"]
+      }).trim();
+
+      return mergeBase;
+    } catch {
+      // If merge-base fails, fall back to the base branch itself
+      return baseBranch;
+    }
+  } catch {
+    // Git command failed entirely, return null
+    return null;
+  }
+}
+
+/**
+ * Generate a simple changelog for PR builds
+ */
+function generatePRChangelog(tag: string, mergeBase: string): string | null {
+  try {
+    // Get commits from this PR only, with conventional commit parsing
+    const commits = execSync(
+      `git log ${mergeBase}..HEAD --pretty=format:"%s|%h" --reverse`,
+      { encoding: "utf8", stdio: ["ignore", "pipe", "pipe"] }
+    ).trim();
+
+    if (!commits) {
+      return null;
+    }
+
+    const lines = commits.split('\n').filter(Boolean);
+    const features: string[] = [];
+    const fixes: string[] = [];
+    const other: string[] = [];
+
+    for (const line of lines) {
+      const [message, hash] = line.split('|');
+      const formatted = `* ${message} (${hash})`;
+
+      if (message.startsWith('feat')) {
+        features.push(formatted);
+      } else if (message.startsWith('fix')) {
+        fixes.push(formatted);
+      } else {
+        other.push(formatted);
+      }
+    }
+
+    let changelog = `## [${tag}](https://github.com/unraid/api/${tag})\n\n`;
+
+    if (features.length > 0) {
+      changelog += `### Features\n\n${features.join('\n')}\n\n`;
+    }
+    if (fixes.length > 0) {
+      changelog += `### Bug Fixes\n\n${fixes.join('\n')}\n\n`;
+    }
+    if (other.length > 0) {
+      changelog += `### Other Changes\n\n${other.join('\n')}\n\n`;
+    }
+
+    return changelog;
+  } catch {
+    return null;
+  }
+}
+
 export const getStagingChangelogFromGit = async ({
   pluginVersion,
   tag,
 }: Pick<PluginEnv, "pluginVersion" | "tag">): Promise<string> => {
   try {
-    const changelogStream = conventionalChangelog(
-      {
-        preset: "conventionalcommits",
-      },
-      {
-        version: pluginVersion,
-      },
-      tag
-        ? {
-            from: "origin/main",
-            to: "HEAD",
-          }
-        : {},
-      undefined,
-      tag
-        ? {
-            headerPartial: `## [${tag}](https://github.com/unraid/api/${tag})\n\n`,
-          }
-        : undefined
-    );
+    // For PR builds with a tag, try to generate a simple PR-specific changelog
+    if (tag) {
+      const mergeBase = getMergeBase();
+      if (mergeBase) {
+        const prChangelog = generatePRChangelog(tag, mergeBase);
+        if (prChangelog) {
+          return prChangelog;
+        }
+      }
+    }
+
+    // Fall back to conventional-changelog for non-PR builds or if PR detection fails
+    const options: any = {
+      releaseCount: 1,
+    };
+
+    if (tag) {
+      options.writerOpts = {
+        headerPartial: `## [${tag}](https://github.com/unraid/api/${tag})\n\n`,
+      };
+    }
+
+    const generator = new ConventionalChangelog()
+      .loadPreset("conventionalcommits")
+      .context({
+        version: tag || pluginVersion,
+        ...(tag && {
+          linkCompare: false,
+        }),
+      })
+      .options(options);
 
     let changelog = "";
-    for await (const chunk of changelogStream) {
+    for await (const chunk of generator.write()) {
       changelog += chunk;
     }
-    // Encode HTML entities using the 'he' library
-    return changelog ?? "";
+
+    return changelog || "";
   } catch (err) {
     console.log('Non-fatal error: Failed to get changelog from git:', err);
-    return tag;
+    // Return a properly formatted fallback with a markdown header
+    if (tag) {
+      return `## [${tag}](https://github.com/unraid/api/${tag})\n\n`;
+    }
+    return `## ${pluginVersion}\n\n`;
   }
 };
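
For orientation, a hedged sketch of how the builder might call this helper during a PR build (the version and tag values are illustrative):

```ts
import { getStagingChangelogFromGit } from "./utils/changelog.js";

// A tag like "PR1234" routes through getMergeBase()/generatePRChangelog();
// without a tag, the conventional-changelog fallback is used.
const changelog = await getStagingChangelogFromGit({
  pluginVersion: "4.22.2",
  tag: "PR1234",
});
console.log(changelog.startsWith("## [PR1234]")); // true when PR commits are found
```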
@@ -1,10 +1,11 @@
 {
   "name": "@unraid/connect-plugin",
-  "version": "4.20.1",
+  "version": "4.22.2",
   "private": true,
   "dependencies": {
     "commander": "14.0.0",
-    "conventional-changelog": "6.0.0",
+    "conventional-changelog": "7.1.1",
+    "conventional-changelog-conventionalcommits": "9.1.0",
     "date-fns": "4.1.0",
     "glob": "11.0.3",
     "html-sloppy-escaper": "0.1.0",
@@ -32,8 +33,9 @@
     "env:validate": "test -f .env || (echo 'Error: .env file missing. Run npm run env:init first' && exit 1)",
     "env:clean": "rm -f .env",
     "// Testing": "",
-    "test": "vitest && pnpm run test:extractor",
-    "test:extractor": "bash ./tests/test-extractor.sh"
+    "test": "vitest && pnpm run test:extractor && pnpm run test:shell-detection",
+    "test:extractor": "bash ./tests/test-extractor.sh",
+    "test:shell-detection": "bash ./tests/test-shell-detection.sh"
   },
   "devDependencies": {
     "http-server": "14.1.1",
@@ -0,0 +1,9 @@
Menu="ManagementAccess:99"
Title="Unraid API Status"
Icon="icon-u-globe"
Tag="globe"
---
<!-- API Status Manager -->
<unraid-api-status-manager></unraid-api-status-manager>

<!-- end unraid-api section -->
@@ -1,5 +1,5 @@
 Menu="ManagementAccess:100"
-Title="Unraid API"
+Title="Unraid API Settings"
 Icon="icon-u-globe"
 Tag="globe"
 ---
@@ -596,8 +596,10 @@ $(function() {
 _(Unraid API extra origins)_:
 _(Connect Remote Access)_:
+_(GraphQL API Developer Sandbox)_:
+_(OIDC Configuration)_:
 
 </div>
 
 <!-- start unraid-api section -->
 <unraid-connect-settings></unraid-connect-settings>
 <!-- end unraid-api section -->
@@ -39,6 +39,7 @@ $validCommands = [
     'start',
     'restart',
     'stop',
+    'status',
     'report',
     'wanip'
 ];
@@ -68,7 +69,12 @@ switch ($command) {
         response_complete(200, array('result' => $output), $output);
         break;
     case 'restart':
-        exec('unraid-api restart 2>/dev/null', $output, $retval);
+        exec('/etc/rc.d/rc.unraid-api restart 2>&1', $output, $retval);
+        $output = implode(PHP_EOL, $output);
+        response_complete(200, array('success' => ($retval === 0), 'result' => $output, 'error' => ($retval !== 0 ? $output : null)), $output);
+        break;
+    case 'status':
+        exec('unraid-api status 2>&1', $output, $retval);
         $output = implode(PHP_EOL, $output);
         response_complete(200, array('result' => $output), $output);
         break;
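
Since the `restart` handler now returns a structured JSON payload, a client-side sketch of consuming it might look like this. The endpoint path and parameter name are assumptions; only the JSON fields (`success`, `result`, `error`) come from the handler above:

```ts
// Hedged sketch: querying the PHP command handler shown above.
async function runApiCommand(command: 'status' | 'restart'): Promise<void> {
  // '/plugins/dynamix.unraid.net/api-command.php' is a hypothetical path.
  const res = await fetch(`/plugins/dynamix.unraid.net/api-command.php?command=${command}`);
  const payload = await res.json();
  if (command === 'restart' && payload.success === false) {
    console.error('restart failed:', payload.error);
  } else {
    console.log(payload.result);
  }
}
```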
@@ -2,6 +2,41 @@
 # Unraid API Installation Verification Script
 # Checks that critical files are installed correctly
 
+# Function to check for non-bash shells
+check_shell() {
+    # This script runs with a #!/bin/bash shebang.
+    # On Unraid, users may configure bash to load other shells through .bashrc.
+    # We need to check whether the interpreter running this script is actually bash.
+    # Use readlink on /proc to find the actual interpreter, not the script name.
+    local current_shell
+
+    # Get the actual interpreter from /proc
+    if [ -e "/proc/$$/exe" ]; then
+        current_shell=$(readlink "/proc/$$/exe")
+    else
+        # Fall back to checking the current process if /proc isn't available.
+        # Note: this may return the script name on some systems.
+        current_shell=$(ps -o comm= -p $$)
+    fi
+
+    # Remove any path and get just the shell name
+    current_shell=$(basename "$current_shell")
+
+    if [[ "$current_shell" != "bash" ]]; then
+        echo "Unsupported shell detected: $current_shell" >&2
+        echo "Unraid scripts require bash but your system is configured to use $current_shell for scripts." >&2
+        echo "This can cause infinite loops or unexpected behavior when Unraid scripts execute." >&2
+        echo "Please configure $current_shell to only activate for interactive shells." >&2
+        echo "Add this check to your ~/.bashrc or /etc/profile before starting $current_shell:" >&2
+        echo "  [[ \$- == *i* ]] && exec $current_shell" >&2
+        echo "This ensures $current_shell only starts for interactive sessions, not scripts." >&2
+        exit 1
+    fi
+}
+
+# Run shell check first
+check_shell
+
 echo "Performing comprehensive installation verification..."
 
 # Define critical files to check (POSIX-compliant, no arrays)

plugin/tests/test-shell-detection.sh (new executable file, 159 lines)
@@ -0,0 +1,159 @@
#!/bin/bash
# Test script for shell detection logic in verify_install.sh

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
VERIFY_SCRIPT="$SCRIPT_DIR/../source/dynamix.unraid.net/usr/local/share/dynamix.unraid.net/install/scripts/verify_install.sh"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Test counters
TESTS_RUN=0
TESTS_PASSED=0

# Helper function to run a test
run_test() {
    local test_name="$1"
    local test_cmd="$2"
    local expected_result="$3"

    TESTS_RUN=$((TESTS_RUN + 1))

    echo -n "Testing: $test_name ... "

    # Run the test and capture the exit code
    set +e
    output=$($test_cmd 2>&1)
    result=$?
    set -e

    if [ "$result" -eq "$expected_result" ]; then
        echo -e "${GREEN}PASS${NC}"
        TESTS_PASSED=$((TESTS_PASSED + 1))
    else
        echo -e "${RED}FAIL${NC}"
        echo "  Expected exit code: $expected_result, Got: $result"
        echo "  Output: $output"
    fi
}

# Extract just the check_shell function from verify_install.sh
extract_check_shell() {
    cat << 'EOF'
#!/bin/bash
check_shell() {
    # This script runs with a #!/bin/bash shebang.
    # On Unraid, users may configure bash to load other shells through .bashrc.
    # We need to check whether the interpreter running this script is actually bash.
    # Use readlink on /proc to find the actual interpreter, not the script name.
    local current_shell

    # Get the actual interpreter from /proc
    if [ -e "/proc/$$/exe" ]; then
        current_shell=$(readlink "/proc/$$/exe")
    else
        # Fall back to checking the current process if /proc isn't available.
        # Note: this may return the script name on some systems.
        current_shell=$(ps -o comm= -p $$)
    fi

    # Remove any path and get just the shell name
    current_shell=$(basename "$current_shell")

    if [[ "$current_shell" != "bash" ]]; then
        echo "Unsupported shell detected: $current_shell" >&2
        echo "Unraid scripts require bash but your system is configured to use $current_shell for scripts." >&2
        echo "This can cause infinite loops or unexpected behavior when Unraid scripts execute." >&2
        echo "Please configure $current_shell to only activate for interactive shells." >&2
        echo "Add this check to your ~/.bashrc or /etc/profile before starting $current_shell:" >&2
        echo "  [[ \$- == *i* ]] && exec $current_shell" >&2
        echo "This ensures $current_shell only starts for interactive sessions, not scripts." >&2
        exit 1
    fi
}
check_shell
EOF
}

echo "=== Shell Detection Tests ==="
echo

# Test 1: Running with bash should succeed
echo "Test 1: Direct bash execution"
TEMP_SCRIPT=$(mktemp)
extract_check_shell > "$TEMP_SCRIPT"
chmod +x "$TEMP_SCRIPT"
run_test "Bash interpreter (should pass)" "bash $TEMP_SCRIPT" 0
rm -f "$TEMP_SCRIPT"

# Test 2: Check that the actual verify_install.sh script works with bash
echo "Test 2: Verify install script with bash"
if [ -f "$VERIFY_SCRIPT" ]; then
    # Create a modified version that only runs check_shell
    TEMP_VERIFY=$(mktemp)
    sed -n '1,/^check_shell$/p' "$VERIFY_SCRIPT" > "$TEMP_VERIFY"
    echo "exit 0" >> "$TEMP_VERIFY"
    chmod +x "$TEMP_VERIFY"
    run_test "Verify install script shell check" "bash $TEMP_VERIFY" 0
    rm -f "$TEMP_VERIFY"
else
    echo -e "${YELLOW}SKIP${NC} - verify_install.sh not found"
fi

# Test 3: Simulate non-bash shell (if available)
echo "Test 3: Non-bash shell simulation"
if command -v sh >/dev/null 2>&1 && [ "$(readlink -f "$(command -v sh)")" != "$(readlink -f "$(command -v bash)")" ]; then
    TEMP_SCRIPT=$(mktemp)
    # Create a test that will fail if sh is detected
    cat << 'EOF' > "$TEMP_SCRIPT"
#!/bin/sh
# This simulates what would happen if a non-bash shell was detected
current_shell=$(basename "$(readlink -f /proc/$$/exe 2>/dev/null || echo sh)")
if [ "$current_shell" != "bash" ]; then
    echo "Detected non-bash shell: $current_shell" >&2
    exit 1
fi
exit 0
EOF
    chmod +x "$TEMP_SCRIPT"
    run_test "Non-bash shell detection" "sh $TEMP_SCRIPT" 1
    rm -f "$TEMP_SCRIPT"
else
    echo -e "${YELLOW}SKIP${NC} - sh not available or is symlinked to bash"
fi

# Test 4: Check /proc availability (informational only, not a failure)
echo "Test 4: /proc filesystem check"
if [ -e "/proc/$$/exe" ]; then
    echo -e "${GREEN}INFO${NC} - /proc filesystem is available"
else
    echo -e "${YELLOW}INFO${NC} - /proc filesystem not available, fallback to ps will be used"
fi

# Test 5: Verify the script name is not detected as the shell
echo "Test 5: Script name not detected as shell"
TEMP_SCRIPT=$(mktemp -t verify_install.XXXXXX)
extract_check_shell > "$TEMP_SCRIPT"
chmod +x "$TEMP_SCRIPT"
# This should pass because it's still bash, even though the script is named verify_install
run_test "Script named verify_install (should still pass)" "bash $TEMP_SCRIPT" 0
rm -f "$TEMP_SCRIPT"

echo
echo "=== Test Summary ==="
echo "Tests run: $TESTS_RUN"
echo "Tests passed: $TESTS_PASSED"
echo "Tests failed: $((TESTS_RUN - TESTS_PASSED))"

if [ "$TESTS_PASSED" -eq "$TESTS_RUN" ]; then
    echo -e "${GREEN}All tests passed!${NC}"
    exit 0
else
    echo -e "${RED}Some tests failed${NC}"
    exit 1
fi
pnpm-lock.yaml (generated, 4601 lines; diff suppressed because it is too large)
readme.md (154 lines changed)
@@ -1,5 +1,6 @@
 <!-- Adapted from: https://github.com/othneildrew/Best-README-Template -->
+<!-- Improved compatibility of back to top link: See: https://github.com/othneildrew/Best-README-Template/pull/73 -->
 
 <a id="readme-top"></a>
 
 <!-- PROJECT SHIELDS -->
@@ -91,9 +92,10 @@
 </details>
 
 <!-- ABOUT THE PROJECT -->
+
 ## About The Project
 
 <!-- [![Product Name Screen Shot][product-screenshot]](https://unraid.net)
 
 <p align="right">(<a href="#readme-top">back to top</a>)</p> -->
@@ -108,6 +110,7 @@
 <p align="right">(<a href="#readme-top">back to top</a>)</p>
 
 <!-- GETTING STARTED -->
+
 ## Getting Started
 
 This section will guide you through the steps necessary to get the monorepo projects running and
@@ -117,13 +120,32 @@ communicating with each other.
 
 Make sure the following software is installed before proceeding.
 
-* Bash
-* Docker (for macOS folks, Orbstack works too)
-* [Node.js (v22)][Node-url]
-* [Just](https://github.com/casey/just) (optional)
-* libvirt (macOS folks can run `brew install libvirt`)
-* rclone (for development)
-* An [Unraid][Unraid-url] server for development
+- Bash
+- Docker (for macOS folks, Orbstack works too)
+- [Node.js (v22)][Node-url]
+- [pnpm](https://pnpm.io/) (v9.0+) - Install with `npm install -g pnpm`
+- [Just](https://github.com/casey/just) (optional)
+- libvirt (macOS folks can run `brew install libvirt`)
+- rclone (v1.70+) - **Important:** Version 1.70 or higher is required
+- jq - JSON processor for scripts
+- An [Unraid][Unraid-url] server for development
+
+#### Ubuntu/WSL Users
+
+For Ubuntu or WSL users, note that the default Ubuntu repositories may have older versions of rclone. You'll need rclone v1.70 or higher, which can be obtained from the [rclone releases page](https://github.com/rclone/rclone/releases).
+
+#### Verify Prerequisites
+
+After installation, verify your dependencies:
+
+```sh
+# Verify installations and versions
+node --version    # Should be v22.x
+pnpm --version    # Should be v9.0+
+rclone version    # Should be v1.70+
+jq --version      # Should be installed
+docker --version  # Should be installed
+```
 
 #### Alternative: Using Nix Flake
@@ -154,25 +176,86 @@ Once you have your key pair, add your public SSH key to your Unraid server:
    cd api
    ```
 
-   If using Nix, enter the development environment:
-
-   ```sh
-   nix develop
-   ```
-
-2. Run the monorepo setup command.
-
-   ```sh
-   pnpm install
-   ```
-
-3. Run the build watcher to build the components and serve a local plugin file that can be installed on your Unraid server.
-
-   ```sh
-   pnpm build:watch
-   ```
-
-   Navigate to Plugins->Install and install the local plugin file that is output to the console.
+   If using Nix, enter the development environment:
+
+   ```sh
+   nix develop
+   ```
+
+2. Install dependencies and verify they're correctly installed:
+
+   ```sh
+   # Install all monorepo dependencies
+   pnpm install
+
+   # The install script will automatically check for required dependencies
+   # and their versions (rclone v1.70+, jq, pnpm, etc.)
+   ```
+
+3. Build the project:
+
+   ```sh
+   # Build individual packages first (from root directory)
+   cd api && pnpm build && cd ..
+   cd web && pnpm build && cd ..
+
+   # Then build the plugin if needed
+   cd plugin && pnpm build && cd ..
+   ```
+
+   Note: The packages must be built in order as the plugin depends on the API build artifacts.
+
+### Development Modes
+
+The project supports two development modes:
+
+#### Mode 1: Build Watcher with Local Plugin
+
+This mode builds the plugin continuously and serves it locally for installation on your Unraid server:
+
+```sh
+# From the root directory (api/)
+pnpm build:watch
+```
+
+This command will output a local plugin URL that you can install on your Unraid server by navigating to Plugins → Install Plugin. Be aware it will take a *while* to build the first time.
+
+#### Mode 2: Development Servers
+
+For active development with hot-reload:
+
+```sh
+# From the root directory - runs all dev servers concurrently
+pnpm dev
+```
+
+Or run individual development servers:
+
+```sh
+# API server (GraphQL backend at http://localhost:3001)
+cd api && pnpm dev
+
+# Web interface (Nuxt frontend at http://localhost:3000)
+cd web && pnpm dev
+```
+
+### Building the Full Plugin
+
+To build the complete plugin package (.plg file):
+
+```sh
+# From the root directory (api/)
+pnpm build:plugin
+
+# The plugin will be created in plugin/dynamix.unraid.net.plg
+```
+
+To deploy the plugin to your Unraid server:
+
+```sh
+# Replace SERVER_IP with your Unraid server's IP address
+pnpm unraid:deploy SERVER_IP
+```
 
 > [!TIP]
 > View other workflows (local dev, etc.) in the [Developer Workflows](./api/docs/developer/workflows.md)
@@ -180,6 +263,7 @@ Once you have your key pair, add your public SSH key to your Unraid server:
 <p align="right">(<a href="#readme-top">back to top</a>)</p>
 
 <!-- USAGE EXAMPLES -->
+
 ## Usage
 
 See [How to Use the API](./api/docs/public/how-to-use-the-api.md).
@@ -201,6 +285,7 @@ See the [open issues](https://github.com/unraid/api/issues) for a full list of p
 <p align="right">(<a href="#readme-top">back to top</a>)</p> -->
 
 <!-- CONTRIBUTING -->
+
 ## Contributing
 
 For a complete guide on contributing to the project, including our code of conduct and development process, please see our [Contributing Guide](./CONTRIBUTING.md). Please read this before contributing.
@@ -209,28 +294,30 @@ For a complete guide on contributing to the project, including our code of condu
 
 For more information about development workflows, repository organization, and other technical details, please refer to the developer documentation inside this repository:
 
-* [Development Guide](./api/docs/developer/development.md) - Setup, building, and debugging instructions
-* [Development Workflows](./api/docs/developer/workflows.md) - Detailed workflows for local development, building, and deployment
-* [Repository Organization](./api/docs/developer/repo-organization.md) - High-level architecture and project structure
+- [Development Guide](./api/docs/developer/development.md) - Setup, building, and debugging instructions
+- [Development Workflows](./api/docs/developer/workflows.md) - Detailed workflows for local development, building, and deployment
+- [Repository Organization](./api/docs/developer/repo-organization.md) - High-level architecture and project structure
 
 ### Work Intent Process
 
 Before starting development work on this project, you must submit a Work Intent and have it approved by a core developer. This helps prevent duplicate work and ensures changes align with the project's goals.
 
 1. **Create a Work Intent**
-   * Go to [Issues → New Issue → Work Intent](https://github.com/unraid/api/issues/new?template=work_intent.md)
-   * Fill out the brief template describing what you want to work on
-   * The issue will be automatically labeled as `work-intent` and `unapproved`
+   - Go to [Issues → New Issue → Work Intent](https://github.com/unraid/api/issues/new?template=work_intent.md)
+   - Fill out the brief template describing what you want to work on
+   - The issue will be automatically labeled as `work-intent` and `unapproved`
 
 2. **Wait for Approval**
-   * A core developer will review your Work Intent
-   * They may ask questions or suggest changes
-   * Once approved, the `unapproved` label will be removed
+   - A core developer will review your Work Intent
+   - They may ask questions or suggest changes
+   - Once approved, the `unapproved` label will be removed
 
 3. **Begin Development**
-   * Only start coding after your Work Intent is approved
-   * Follow the approach outlined in your approved Work Intent
-   * Reference the Work Intent in your future PR
+   - Only start coding after your Work Intent is approved
+   - Follow the approach outlined in your approved Work Intent
+   - Reference the Work Intent in your future PR
 
 ---
@@ -254,14 +341,16 @@ Don't forget to give the project a star! Thanks again!
 </a>
 
 <!-- Community & Acknowledgements -->
+
 ## Community
 
 🌐 [Forums](https://forums.unraid.net/)
 💬 [Discord](https://discord.unraid.net/)
 
 <p align="right">(<a href="#readme-top">back to top</a>)</p>
 
 <!-- CONTACT -->
+
 ## Contact
 
 [@UnraidOfficial](https://twitter.com/UnraidOfficial) - <contact@unraid.net>
@@ -272,6 +361,7 @@ Project Link: [https://github.com/unraid/api](https://github.com/unraid/api)
 
 <!-- MARKDOWN LINKS & IMAGES -->
 <!-- https://www.markdownguide.org/basic-syntax/#reference-style-links -->
+
 [contributors-shield]: https://img.shields.io/github/contributors/unraid/api.svg?style=for-the-badge
 [contributors-url]: https://github.com/unraid/api/graphs/contributors
 [forks-shield]: https://img.shields.io/github/forks/unraid/api.svg?style=for-the-badge
scripts/cleanup-old-builds.sh (new executable file, 153 lines)
@@ -0,0 +1,153 @@
#!/bin/bash

# Script to clean up old timestamped builds from Cloudflare R2.
# This will remove old .txz files with the pattern dynamix.unraid.net-YYYY.MM.DD.HHMM.txz

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

echo -e "${YELLOW}🧹 Cloudflare Old Build Cleanup Script${NC}"
echo "This will delete old timestamped .txz builds from the preview bucket"
echo ""

# Check for required environment variables
if [ -z "$CF_ACCESS_KEY_ID" ] || [ -z "$CF_SECRET_ACCESS_KEY" ] || [ -z "$CF_ENDPOINT" ] || [ -z "$CF_BUCKET_PREVIEW" ]; then
    echo -e "${RED}❌ Error: Missing required environment variables${NC}"
    echo "Please set the following environment variables:"
    echo "  - CF_ACCESS_KEY_ID"
    echo "  - CF_SECRET_ACCESS_KEY"
    echo "  - CF_ENDPOINT"
    echo "  - CF_BUCKET_PREVIEW"
    exit 1
fi

# Configure AWS CLI for Cloudflare R2
export AWS_ACCESS_KEY_ID="$CF_ACCESS_KEY_ID"
export AWS_SECRET_ACCESS_KEY="$CF_SECRET_ACCESS_KEY"
export AWS_DEFAULT_REGION="auto"

echo "Endpoint: $CF_ENDPOINT"
echo "Bucket: $CF_BUCKET_PREVIEW"
echo ""

# Optional: specify number of days to keep (default: 7)
KEEP_DAYS=${1:-7}
echo -e "${BLUE}Keeping builds from the last ${KEEP_DAYS} days${NC}"
echo ""

# Calculate cutoff date
if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS
    CUTOFF_DATE=$(date -v -${KEEP_DAYS}d +"%Y.%m.%d")
else
    # Linux
    CUTOFF_DATE=$(date -d "${KEEP_DAYS} days ago" +"%Y.%m.%d")
fi

echo "Cutoff date: ${CUTOFF_DATE} (will delete builds older than this)"
echo ""

# List all timestamped TXZ files in the unraid-api directory
echo -e "${YELLOW}📋 Scanning for old builds...${NC}"

# Get all .txz files matching the pattern
ALL_FILES=$(aws s3 ls "s3://${CF_BUCKET_PREVIEW}/unraid-api/" --endpoint-url "$CF_ENDPOINT" --recursive | \
    grep -E "dynamix\.unraid\.net-[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]{4}\.txz" | \
    awk '{print $4}' || true)

if [ -z "$ALL_FILES" ]; then
    echo -e "${GREEN}✅ No timestamped builds found${NC}"
    exit 0
fi

# Filter files older than the cutoff
OLD_FILES=""
KEEP_FILES=""
TOTAL_COUNT=0
OLD_COUNT=0

while IFS= read -r file; do
    # Use VAR=$((VAR + 1)) rather than ((VAR++)): the latter returns a non-zero
    # status when the pre-increment value is 0, which would kill the script
    # under `set -e`.
    TOTAL_COUNT=$((TOTAL_COUNT + 1))
    # Extract date from filename (format: YYYY.MM.DD.HHMM)
    if [[ $file =~ ([0-9]{4}\.[0-9]{2}\.[0-9]{2})\.[0-9]{4}\.txz ]]; then
        FILE_DATE="${BASH_REMATCH[1]}"

        # Compare dates (string comparison works for YYYY.MM.DD format)
        if [[ "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
            OLD_FILES="${OLD_FILES}${file}\n"
            OLD_COUNT=$((OLD_COUNT + 1))
        else
            KEEP_FILES="${KEEP_FILES}${file}\n"
        fi
    fi
done <<< "$ALL_FILES"

echo "Found ${TOTAL_COUNT} total timestamped builds"
echo "Will delete ${OLD_COUNT} old builds"
echo "Will keep $((TOTAL_COUNT - OLD_COUNT)) recent builds"
echo ""

if [ "$OLD_COUNT" -eq 0 ]; then
    echo -e "${GREEN}✅ No old builds to delete${NC}"
    exit 0
fi

# Show a sample of the files to be deleted
echo -e "${YELLOW}Sample of files to be deleted:${NC}"
echo -e "$OLD_FILES" | head -5
if [ "$OLD_COUNT" -gt 5 ]; then
    echo "... and $((OLD_COUNT - 5)) more"
fi
echo ""

# Confirmation prompt
read -p "Are you sure you want to delete these ${OLD_COUNT} old builds? (yes/no): " -r
echo ""

if [[ ! $REPLY =~ ^[Yy]es$ ]]; then
    echo -e "${YELLOW}⚠️ Cleanup cancelled${NC}"
    exit 0
fi

# Delete old files
DELETED=0
FAILED=0

echo -e "${YELLOW}🗑️ Deleting old builds...${NC}"
while IFS= read -r file; do
    if [ -n "$file" ]; then
        echo -n "Deleting $(basename "$file")... "

        if aws s3 rm "s3://${CF_BUCKET_PREVIEW}/${file}" \
            --endpoint-url "$CF_ENDPOINT" \
            >/dev/null 2>&1; then
            echo -e "${GREEN}✓${NC}"
            DELETED=$((DELETED + 1))
        else
            echo -e "${RED}✗${NC}"
            FAILED=$((FAILED + 1))
        fi
    fi
done <<< "$(echo -e "$OLD_FILES")"

echo ""
echo -e "${GREEN}🎉 Cleanup complete!${NC}"
echo "  - Deleted: $DELETED old build(s)"
if [ $FAILED -gt 0 ]; then
    echo -e "  - Failed: ${RED}$FAILED${NC} build(s)"
fi

# Show remaining recent builds
echo ""
echo -e "${BLUE}📦 Recent builds kept:${NC}"
echo -e "$KEEP_FILES" | head -5
KEEP_COUNT=$(echo -e "$KEEP_FILES" | grep -c . || echo 0)
if [ "$KEEP_COUNT" -gt 5 ]; then
    echo "... and $((KEEP_COUNT - 5)) more"
fi
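
The cutoff comparison above relies on plain lexicographic ordering, which is only safe because the date components are zero-padded. A tiny sketch of the same idea, for illustration:

```ts
// Zero-padded "YYYY.MM.DD" strings sort the same way the dates do,
// so a plain string comparison is enough to find stale builds.
const cutoff = '2025.09.05';
const builds = ['2025.08.30', '2025.09.04', '2025.09.12'];
const stale = builds.filter((d) => d < cutoff);
console.log(stale); // ['2025.08.30', '2025.09.04']
```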
scripts/cleanup-pr-builds.sh (new executable file, 107 lines)
@@ -0,0 +1,107 @@
#!/bin/bash

# Script to delete all PR builds from Cloudflare R2.
# This will remove all artifacts under unraid-api/tag/PR* paths

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

echo -e "${YELLOW}🧹 Cloudflare PR Build Cleanup Script${NC}"
echo "This will delete all PR builds from the preview bucket"
echo ""

# Check for required environment variables
if [ -z "$CF_ACCESS_KEY_ID" ] || [ -z "$CF_SECRET_ACCESS_KEY" ] || [ -z "$CF_ENDPOINT" ] || [ -z "$CF_BUCKET_PREVIEW" ]; then
    echo -e "${RED}❌ Error: Missing required environment variables${NC}"
    echo "Please set the following environment variables:"
    echo "  - CF_ACCESS_KEY_ID"
    echo "  - CF_SECRET_ACCESS_KEY"
    echo "  - CF_ENDPOINT"
    echo "  - CF_BUCKET_PREVIEW"
    echo ""
    echo "You can source them from your .env file or export them manually:"
    echo "  export CF_ACCESS_KEY_ID='your-key-id'"
    echo "  export CF_SECRET_ACCESS_KEY='your-secret-key'"
    echo "  export CF_ENDPOINT='your-endpoint'"
    echo "  export CF_BUCKET_PREVIEW='your-bucket'"
    exit 1
fi

# Configure AWS CLI for Cloudflare R2
export AWS_ACCESS_KEY_ID="$CF_ACCESS_KEY_ID"
export AWS_SECRET_ACCESS_KEY="$CF_SECRET_ACCESS_KEY"
export AWS_DEFAULT_REGION="auto"

echo "Endpoint: $CF_ENDPOINT"
echo "Bucket: $CF_BUCKET_PREVIEW"
echo ""

# List all PR directories
echo -e "${YELLOW}📋 Listing all PR builds...${NC}"
PR_DIRS=$(aws s3 ls "s3://${CF_BUCKET_PREVIEW}/unraid-api/tag/" --endpoint-url "$CF_ENDPOINT" 2>/dev/null | grep "PRE PR" | awk '{print $2}' || true)

if [ -z "$PR_DIRS" ]; then
    echo -e "${GREEN}✅ No PR builds found to clean up${NC}"
    exit 0
fi

# Count PR builds
PR_COUNT=$(echo "$PR_DIRS" | wc -l | tr -d ' ')
echo -e "Found ${YELLOW}${PR_COUNT}${NC} PR build(s):"
echo "$PR_DIRS"
echo ""

# Confirmation prompt
read -p "Are you sure you want to delete ALL these PR builds? (yes/no): " -r
echo ""

if [[ ! $REPLY =~ ^[Yy]es$ ]]; then
    echo -e "${YELLOW}⚠️ Cleanup cancelled${NC}"
    exit 0
fi

# Delete each PR directory
DELETED=0
FAILED=0

for PR_DIR in $PR_DIRS; do
    PR_NUM=${PR_DIR%/} # Remove trailing slash
    echo -n "Deleting $PR_NUM... "

    if aws s3 rm "s3://${CF_BUCKET_PREVIEW}/unraid-api/tag/${PR_NUM}" \
        --recursive \
        --endpoint-url "$CF_ENDPOINT" \
        >/dev/null 2>&1; then
        echo -e "${GREEN}✓${NC}"
        # VAR=$((VAR + 1)) avoids the non-zero status of ((VAR++)) under `set -e`
        DELETED=$((DELETED + 1))
    else
        echo -e "${RED}✗${NC}"
        FAILED=$((FAILED + 1))
    fi
done

echo ""
echo -e "${GREEN}🎉 Cleanup complete!${NC}"
echo "  - Deleted: $DELETED PR build(s)"
if [ $FAILED -gt 0 ]; then
    echo -e "  - Failed: ${RED}$FAILED${NC} PR build(s)"
fi

# Optional: list remaining items to verify
echo ""
echo -e "${YELLOW}📋 Verifying cleanup...${NC}"
REMAINING=$(aws s3 ls "s3://${CF_BUCKET_PREVIEW}/unraid-api/tag/" --endpoint-url "$CF_ENDPOINT" 2>/dev/null | grep -c "PRE PR" || true)
# Ensure REMAINING is a valid number
REMAINING=${REMAINING:-0}
echo "Remaining PR builds: $REMAINING"

if [ "$REMAINING" -eq 0 ]; then
    echo -e "${GREEN}✅ All PR builds successfully removed${NC}"
else
    echo -e "${YELLOW}⚠️ Some PR builds may still exist${NC}"
fi
@@ -104,6 +104,7 @@ eslint.configs.recommended, ...tseslint.configs.recommended, // TypeScript Files
         parser: tseslint.parser,
         parserOptions: {
             ...commonLanguageOptions,
+            tsconfigRootDir: import.meta.dirname,
             ecmaFeatures: {
                 jsx: true,
             },
@@ -128,6 +129,7 @@ eslint.configs.recommended, ...tseslint.configs.recommended, // TypeScript Files
         parserOptions: {
             ...commonLanguageOptions,
             parser: tseslint.parser,
+            tsconfigRootDir: import.meta.dirname,
             ecmaFeatures: {
                 jsx: true,
             },
@@ -1,6 +1,6 @@
 {
   "name": "@unraid/ui",
-  "version": "4.20.1",
+  "version": "4.22.2",
   "private": true,
   "license": "GPL-2.0-or-later",
   "type": "module",
@@ -66,7 +66,7 @@
     "shadcn-vue": "2.2.0",
     "tailwind-merge": "2.6.0",
     "tw-animate-css": "1.3.7",
-    "vue-sonner": "1.3.2"
+    "vue-sonner": "2.0.8"
   },
   "devDependencies": {
     "@eslint/js": "9.34.0",
@@ -51,7 +51,7 @@ const classes = computed(() => {
 });
 
 const needsBrandGradientBackground = computed(() => {
-  return ['outline-solid', 'outline-primary'].includes(props.variant ?? '');
+  return ['outline', 'outline-solid', 'outline-primary'].includes(props.variant ?? '');
 });
 
 const isLink = computed(() => Boolean(props.href));
@@ -1,8 +1,16 @@
 <script lang="ts" setup>
 import { onMounted } from 'vue';
 import { Toaster as Sonner, toast, type ToasterProps } from 'vue-sonner';
+import 'vue-sonner/style.css';
 
-const props = defineProps<ToasterProps>();
+// Accept theme as a prop, defaulting to 'light' if not provided
+interface Props extends ToasterProps {
+  theme?: 'light' | 'dark' | 'system';
+}
+
+const props = withDefaults(defineProps<Props>(), {
+  theme: 'light',
+});
 
 onMounted(() => {
   globalThis.toast = toast;
@@ -27,3 +35,17 @@ onMounted(() => {
   }"
 />
 </template>
+
+<style>
+/* Override styles for the Unraid environment */
+[data-sonner-toast] [data-close-button] {
+  min-width: inherit !important;
+}
+
+/* Override Unraid webgui docker icon styles on sonner containers */
+[data-sonner-toast] [data-icon]:before,
+[data-sonner-toast] .fa-docker:before {
+  font-family: inherit !important;
+  content: '' !important;
+}
+</style>
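
Because `onMounted` assigns vue-sonner's `toast` to `globalThis`, legacy webgui code can raise notifications without an import. A minimal sketch (the declaration below is a simplification of vue-sonner's real signature, and the message is illustrative):

```ts
// Simplified global declaration; vue-sonner's toast() accepts richer options.
declare global {
  var toast: (message: string) => void;
}

// Anywhere in the webgui after the Toaster component has mounted:
globalThis.toast('Settings saved');
```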
@@ -32,7 +32,7 @@ const { teleportTarget } = useTeleport();
       v-bind="forwarded"
       :class="
         cn(
-          'bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 border-muted z-50 min-w-32 overflow-hidden rounded-lg border p-1 shadow-md',
+          'bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 border-muted z-[103] min-w-32 overflow-hidden rounded-lg border p-1 shadow-md',
           props.class
         )
       "
unraid-ui/src/composables/useTeleport.test.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
import useTeleport from '@/composables/useTeleport';
import { mount } from '@vue/test-utils';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { defineComponent } from 'vue';

describe('useTeleport', () => {
  beforeEach(() => {
    // Clear the DOM before each test
    document.body.innerHTML = '';
    vi.clearAllMocks();
  });

  afterEach(() => {
    // Clean up the virtual container if it exists
    const virtualContainer = document.getElementById('unraid-api-modals-virtual');
    if (virtualContainer) {
      virtualContainer.remove();
    }
    // Reset the module to clear the virtualModalContainer variable
    vi.resetModules();
  });

  it('should return teleportTarget ref with correct value', () => {
    const { teleportTarget } = useTeleport();
    expect(teleportTarget.value).toBe('#unraid-api-modals-virtual');
  });

  it('should create virtual container element on mount with correct properties', () => {
    const TestComponent = defineComponent({
      setup() {
        const { teleportTarget } = useTeleport();
        return { teleportTarget };
      },
      template: '<div>{{ teleportTarget }}</div>',
    });

    // Initially, the virtual container should not exist
    expect(document.getElementById('unraid-api-modals-virtual')).toBeNull();

    // Mount the component
    mount(TestComponent);

    // After mount, the virtual container should be created with the correct properties
    const virtualContainer = document.getElementById('unraid-api-modals-virtual');
    expect(virtualContainer).toBeTruthy();
    expect(virtualContainer?.className).toBe('unapi');
    expect(virtualContainer?.style.position).toBe('relative');
    expect(virtualContainer?.style.zIndex).toBe('999999');
    expect(virtualContainer?.parentElement).toBe(document.body);
  });

  it('should reuse existing virtual container within same test', () => {
    // Manually create the container first
    const manualContainer = document.createElement('div');
    manualContainer.id = 'unraid-api-modals-virtual';
    manualContainer.className = 'unapi';
    manualContainer.style.position = 'relative';
    manualContainer.style.zIndex = '999999';
    document.body.appendChild(manualContainer);

    const TestComponent = defineComponent({
      setup() {
        const { teleportTarget } = useTeleport();
        return { teleportTarget };
      },
      template: '<div>{{ teleportTarget }}</div>',
    });

    // Mount the component - it should not create a new container
    mount(TestComponent);

    // Should still have only one container
    const containers = document.querySelectorAll('#unraid-api-modals-virtual');
    expect(containers.length).toBe(1);
    expect(containers[0]).toBe(manualContainer);
  });
});
@@ -1,12 +1,24 @@
-import { ensureTeleportContainer } from '@/helpers/ensure-teleport-container';
 import { onMounted, ref } from 'vue';
 
+let virtualModalContainer: HTMLDivElement | null = null;
+
+const ensureVirtualContainer = () => {
+  if (!virtualModalContainer) {
+    virtualModalContainer = document.createElement('div');
+    virtualModalContainer.id = 'unraid-api-modals-virtual';
+    virtualModalContainer.className = 'unapi';
+    virtualModalContainer.style.position = 'relative';
+    virtualModalContainer.style.zIndex = '999999';
+    document.body.appendChild(virtualModalContainer);
+  }
+  return virtualModalContainer;
+};
+
 const useTeleport = () => {
-  const teleportTarget = ref<string | HTMLElement>('body');
+  const teleportTarget = ref<string>('#unraid-api-modals-virtual');
 
   onMounted(() => {
-    const container = ensureTeleportContainer();
-    teleportTarget.value = container;
+    ensureVirtualContainer();
   });
 
   return {
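
For reference, a hedged sketch of how a component consumes this composable (the template is illustrative; `defer` requires Vue 3.5+ and covers the case where the virtual container has not been created yet when the teleport resolves):

```ts
import { defineComponent } from 'vue';
import useTeleport from '@/composables/useTeleport';

export default defineComponent({
  setup() {
    const { teleportTarget } = useTeleport();
    return { teleportTarget };
  },
  // Modal markup is teleported into the shared '#unraid-api-modals-virtual'
  // container, which sits directly under <body> at z-index 999999.
  template: `
    <Teleport :to="teleportTarget" defer>
      <div class="modal">modal content</div>
    </Teleport>
  `,
});
```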
@@ -1,23 +0,0 @@
/**
 * Ensures the teleport container exists in the DOM.
 * This is used by both the standalone mount script and unraid-ui components
 * to ensure modals and other teleported content have a target.
 */
export function ensureTeleportContainer(): HTMLElement {
  const containerId = 'unraid-teleport-container';

  // Check if the container already exists
  let container = document.getElementById(containerId);

  // If it doesn't exist, create it
  if (!container) {
    container = document.createElement('div');
    container.id = containerId;
    container.style.position = 'relative';
    container.classList.add('unapi');
    container.style.zIndex = '999999'; // Very high z-index to ensure it's always on top
    document.body.appendChild(container);
  }

  return container;
}
@@ -15,6 +15,3 @@ export * from '@/lib/utils';
 export { default as useTeleport } from '@/composables/useTeleport';
 export { useToast } from '@/composables/useToast';
 export type { ToastInstance } from '@/composables/useToast';
-
-// Helpers
-export { ensureTeleportContainer } from '@/helpers/ensure-teleport-container';
@@ -51,10 +51,6 @@
   "exclude": [
     "node_modules",
     "**/*.copy.vue",
-    "**/*copy.vue",
-    "**/*.test.ts",
-    "**/*.spec.ts",
-    "**/*.test.tsx",
-    "**/*.spec.tsx"
+    "**/*copy.vue"
   ]
 }
@@ -19,6 +19,17 @@ export default function createConfig() {
     dts({
       insertTypesEntry: true,
       include: ['src/**/*.ts', 'src/**/*.vue'],
+      exclude: [
+        'src/**/*.test.ts',
+        'src/**/*.spec.ts',
+        'src/**/*.test.tsx',
+        'src/**/*.spec.tsx',
+        'src/**/*.test.vue',
+        'src/**/*.spec.vue',
+        'src/**/*.stories.*',
+        'src/**/*.stories.{ts,tsx,vue}',
+        'src/**/__tests__/**',
+      ],
       outDir: 'dist',
       rollupTypes: true,
       copyDtsFiles: true,
@@ -75,6 +86,9 @@ export default function createConfig() {
         '@/theme': resolve(__dirname, './src/theme'),
       },
     },
+    optimizeDeps: {
+      include: ['ajv', 'ajv-errors'],
+    },
     test: {
       environment: 'happy-dom',
       include: ['src/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'],
@@ -4,7 +4,7 @@
 For legacy compatibility, Unraid ships web components to the webgui. These components
 are written as Vue and turned into web components as a build step. By convention,
-Vue components that are built as top-level web components are suffixed with `*.ce.vue`
+Vue components that are built as top-level web components are suffixed with `*.standalone.vue`
 for "**c**ustom **e**lement", which comes from the tool used for compilation: `nuxt-custom-elements`.
 
 Note: `nuxt-custom-elements` is currently pinned to a specific version because
@@ -9,7 +9,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
 
 import type { ComposerTranslation } from 'vue-i18n';
 
-import WelcomeModal from '~/components/Activation/WelcomeModal.ce.vue';
+import WelcomeModal from '~/components/Activation/WelcomeModal.standalone.vue';
 
 vi.mock('@unraid/ui', async (importOriginal) => {
   const actual = (await importOriginal()) as Record<string, unknown>;
@@ -76,7 +76,7 @@ vi.mock('~/store/theme', () => ({
   useThemeStore: () => mockThemeStore,
 }));
 
-describe('Activation/WelcomeModal.ce.vue', () => {
+describe('Activation/WelcomeModal.standalone.vue', () => {
   let mockSetProperty: ReturnType<typeof vi.fn>;
   let mockQuerySelector: ReturnType<typeof vi.fn>;
@@ -11,7 +11,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest';
 import type { ServerconnectPluginInstalled } from '~/types/server';
 
-import Auth from '~/components/Auth.ce.vue';
+import Auth from '~/components/Auth.standalone.vue';
 import { useServerStore } from '~/store/server';
 
 vi.mock('vue-i18n', () => ({
@@ -12,7 +12,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
 import type { MockInstance } from 'vitest';
 
-import ColorSwitcher from '~/components/ColorSwitcher.ce.vue';
+import ColorSwitcher from '~/components/ColorSwitcher.standalone.vue';
 import { useThemeStore } from '~/store/theme';
 
 // Explicitly mock @unraid/ui to ensure we use the actual components
@@ -8,7 +8,7 @@ import { mount } from '@vue/test-utils';
 import { createTestingPinia } from '@pinia/testing';
 import { beforeEach, describe, expect, it, vi } from 'vitest';
 
-import DowngradeOs from '~/components/DowngradeOs.ce.vue';
+import DowngradeOs from '~/components/DowngradeOs.standalone.vue';
 import { useServerStore } from '~/store/server';
 
 vi.mock('crypto-js/aes', () => ({
Some files were not shown because too many files have changed in this diff.