mirror of
https://github.com/unraid/api.git
synced 2026-01-02 14:40:01 -06:00
Compare commits
85 Commits
feat/build
...
fix/build-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1d9ce0aa3d | ||
|
|
9714b21c5c | ||
|
|
44b4d77d80 | ||
|
|
3f5039c342 | ||
|
|
1d2c6701ce | ||
|
|
0ee09aefbb | ||
|
|
c60a51dc1b | ||
|
|
c4fbf698b4 | ||
|
|
00faa8f9d9 | ||
|
|
45d9d65c13 | ||
|
|
771014b005 | ||
|
|
31a255c928 | ||
|
|
167857a323 | ||
|
|
b80988aaab | ||
|
|
fe4a6451f1 | ||
|
|
9a86c615da | ||
|
|
25ff8992a5 | ||
|
|
45fb53d040 | ||
|
|
c855caa9b2 | ||
|
|
ba4a43aec8 | ||
|
|
c4ca761dfc | ||
|
|
01d353fa08 | ||
|
|
4a07953457 | ||
|
|
0b20e3ea9f | ||
|
|
3f4af09db5 | ||
|
|
222ced7518 | ||
|
|
03dae7ce66 | ||
|
|
0990b898bd | ||
|
|
95faeaa2f3 | ||
|
|
b49ef5a762 | ||
|
|
c782cf0e87 | ||
|
|
f95ca9c9cb | ||
|
|
a59b363ebc | ||
|
|
2fef10c94a | ||
|
|
1c73a4af42 | ||
|
|
88a924c84f | ||
|
|
ae4d3ecbc4 | ||
|
|
c569043ab5 | ||
|
|
50ea2a3ffb | ||
|
|
b518131406 | ||
|
|
e57d81e073 | ||
|
|
88baddd6c0 | ||
|
|
abc22bdb87 | ||
|
|
6ed2f5ce8e | ||
|
|
b79b44e95c | ||
|
|
ca22285a26 | ||
|
|
838be2c52e | ||
|
|
73c1100d0b | ||
|
|
434e331384 | ||
|
|
a27453fda8 | ||
|
|
98e6058cd8 | ||
|
|
6c2c51ae1d | ||
|
|
d10c12035e | ||
|
|
5dd6f42550 | ||
|
|
4759b3d0b3 | ||
|
|
daeeba8c1f | ||
|
|
196bd52628 | ||
|
|
6c0061923a | ||
|
|
f33afe7ae5 | ||
|
|
aecf70ffad | ||
|
|
785f1f5eb1 | ||
|
|
193be3df36 | ||
|
|
116ee88fcf | ||
|
|
413db4bd30 | ||
|
|
095c2221c9 | ||
|
|
dfe891ce38 | ||
|
|
797bf50ec7 | ||
|
|
af5ca11860 | ||
|
|
f0cffbdc7a | ||
|
|
16905dd3a6 | ||
|
|
2ecdb99052 | ||
|
|
286f1be8ed | ||
|
|
bcefdd5261 | ||
|
|
d3459ecbc6 | ||
|
|
534a07788b | ||
|
|
239cdd6133 | ||
|
|
77cfc07dda | ||
|
|
728b38ac11 | ||
|
|
44774d0acd | ||
|
|
e204eb80a0 | ||
|
|
0c727c37f4 | ||
|
|
292bc0fc81 | ||
|
|
53f501e1a7 | ||
|
|
6cf7c88242 | ||
|
|
33774aa596 |
@@ -1,123 +1,3 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"# Development Commands",
|
||||
"Bash(pnpm install)",
|
||||
"Bash(pnpm dev)",
|
||||
"Bash(pnpm build)",
|
||||
"Bash(pnpm test)",
|
||||
"Bash(pnpm test:*)",
|
||||
"Bash(pnpm lint)",
|
||||
"Bash(pnpm lint:fix)",
|
||||
"Bash(pnpm type-check)",
|
||||
"Bash(pnpm codegen)",
|
||||
"Bash(pnpm storybook)",
|
||||
"Bash(pnpm --filter * dev)",
|
||||
"Bash(pnpm --filter * build)",
|
||||
"Bash(pnpm --filter * test)",
|
||||
"Bash(pnpm --filter * lint)",
|
||||
"Bash(pnpm --filter * codegen)",
|
||||
|
||||
"# Git Commands (read-only)",
|
||||
"Bash(git status)",
|
||||
"Bash(git diff)",
|
||||
"Bash(git log)",
|
||||
"Bash(git branch)",
|
||||
"Bash(git remote -v)",
|
||||
|
||||
"# Search Commands",
|
||||
"Bash(rg *)",
|
||||
|
||||
"# File System (read-only)",
|
||||
"Bash(ls)",
|
||||
"Bash(ls -la)",
|
||||
"Bash(pwd)",
|
||||
"Bash(find . -name)",
|
||||
"Bash(find . -type)",
|
||||
|
||||
"# Node/NPM Commands",
|
||||
"Bash(node --version)",
|
||||
"Bash(pnpm --version)",
|
||||
"Bash(npx --version)",
|
||||
|
||||
"# Environment Commands",
|
||||
"Bash(echo $*)",
|
||||
"Bash(which *)",
|
||||
|
||||
"# Process Commands",
|
||||
"Bash(ps aux | grep)",
|
||||
"Bash(lsof -i)",
|
||||
|
||||
"# Documentation Domains",
|
||||
"WebFetch(domain:tailwindcss.com)",
|
||||
"WebFetch(domain:github.com)",
|
||||
"WebFetch(domain:reka-ui.com)",
|
||||
"WebFetch(domain:nodejs.org)",
|
||||
"WebFetch(domain:pnpm.io)",
|
||||
"WebFetch(domain:vitejs.dev)",
|
||||
"WebFetch(domain:nuxt.com)",
|
||||
"WebFetch(domain:nestjs.com)",
|
||||
|
||||
"# IDE Integration",
|
||||
"mcp__ide__getDiagnostics",
|
||||
|
||||
"# Browser MCP (for testing)",
|
||||
"mcp__browsermcp__browser_navigate",
|
||||
"mcp__browsermcp__browser_click",
|
||||
"mcp__browsermcp__browser_screenshot"
|
||||
],
|
||||
"deny": [
|
||||
"# Dangerous Commands",
|
||||
"Bash(rm -rf)",
|
||||
"Bash(chmod 777)",
|
||||
"Bash(curl)",
|
||||
"Bash(wget)",
|
||||
"Bash(ssh)",
|
||||
"Bash(scp)",
|
||||
"Bash(sudo)",
|
||||
"Bash(su)",
|
||||
"Bash(pkill)",
|
||||
"Bash(kill)",
|
||||
"Bash(killall)",
|
||||
"Bash(python)",
|
||||
"Bash(python3)",
|
||||
"Bash(pip)",
|
||||
"Bash(npm)",
|
||||
"Bash(yarn)",
|
||||
"Bash(apt)",
|
||||
"Bash(brew)",
|
||||
"Bash(systemctl)",
|
||||
"Bash(service)",
|
||||
"Bash(docker)",
|
||||
"Bash(docker-compose)",
|
||||
|
||||
"# File Modification (use Edit/Write tools instead)",
|
||||
"Bash(sed)",
|
||||
"Bash(awk)",
|
||||
"Bash(perl)",
|
||||
"Bash(echo > *)",
|
||||
"Bash(echo >> *)",
|
||||
"Bash(cat > *)",
|
||||
"Bash(cat >> *)",
|
||||
"Bash(tee)",
|
||||
|
||||
"# Git Write Commands (require explicit user action)",
|
||||
"Bash(git add)",
|
||||
"Bash(git commit)",
|
||||
"Bash(git push)",
|
||||
"Bash(git pull)",
|
||||
"Bash(git merge)",
|
||||
"Bash(git rebase)",
|
||||
"Bash(git checkout)",
|
||||
"Bash(git reset)",
|
||||
"Bash(git clean)",
|
||||
|
||||
"# Package Management Write Commands",
|
||||
"Bash(pnpm add)",
|
||||
"Bash(pnpm remove)",
|
||||
"Bash(pnpm update)",
|
||||
"Bash(pnpm upgrade)"
|
||||
]
|
||||
},
|
||||
"enableAllProjectMcpServers": false
|
||||
"permissions": {}
|
||||
}
|
||||
66
.github/workflows/build-plugin.yml
vendored
66
.github/workflows/build-plugin.yml
vendored
@@ -36,6 +36,8 @@ on:
|
||||
required: true
|
||||
CF_ENDPOINT:
|
||||
required: true
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
|
||||
required: false
|
||||
jobs:
|
||||
build-plugin:
|
||||
name: Build and Deploy Plugin
|
||||
@@ -49,21 +51,16 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Get API Version
|
||||
id: vars
|
||||
@@ -74,14 +71,6 @@ jobs:
|
||||
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
@@ -97,7 +86,7 @@ jobs:
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
pattern: unraid-wc-rich
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
|
||||
merge-multiple: true
|
||||
- name: Download Unraid API
|
||||
uses: actions/download-artifact@v5
|
||||
@@ -151,7 +140,7 @@ jobs:
|
||||
uses: the-actions-org/workflow-dispatch@v4.0.0
|
||||
with:
|
||||
workflow: release-production.yml
|
||||
inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
|
||||
inputs: '{ "version": "v${{ steps.vars.outputs.API_VERSION }}" }'
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Upload to Cloudflare
|
||||
@@ -181,3 +170,40 @@ jobs:
|
||||
```
|
||||
${{ inputs.BASE_URL }}/tag/${{ inputs.TAG }}/dynamix.unraid.net.plg
|
||||
```
|
||||
|
||||
- name: Clean up old preview builds
|
||||
if: inputs.RELEASE_CREATED == 'false' && github.event_name == 'push'
|
||||
continue-on-error: true
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
run: |
|
||||
echo "🧹 Cleaning up old preview builds (keeping last 7 days)..."
|
||||
|
||||
# Calculate cutoff date (7 days ago)
|
||||
CUTOFF_DATE=$(date -d "7 days ago" +"%Y.%m.%d")
|
||||
echo "Deleting builds older than: ${CUTOFF_DATE}"
|
||||
|
||||
# List and delete old timestamped .txz files
|
||||
OLD_FILES=$(aws s3 ls "s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/" \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }} --recursive | \
|
||||
grep -E "dynamix\.unraid\.net-[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]{4}\.txz" | \
|
||||
awk '{print $4}' || true)
|
||||
|
||||
DELETED_COUNT=0
|
||||
if [ -n "$OLD_FILES" ]; then
|
||||
while IFS= read -r file; do
|
||||
if [[ $file =~ ([0-9]{4}\.[0-9]{2}\.[0-9]{2})\.[0-9]{4}\.txz ]]; then
|
||||
FILE_DATE="${BASH_REMATCH[1]}"
|
||||
if [[ "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
|
||||
echo "Deleting old build: $(basename "$file")"
|
||||
aws s3 rm "s3://${{ secrets.CF_BUCKET_PREVIEW }}/${file}" \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }} || true
|
||||
((DELETED_COUNT++))
|
||||
fi
|
||||
fi
|
||||
done <<< "$OLD_FILES"
|
||||
fi
|
||||
|
||||
echo "✅ Deleted ${DELETED_COUNT} old builds"
|
||||
|
||||
13
.github/workflows/deploy-storybook.yml
vendored
13
.github/workflows/deploy-storybook.yml
vendored
@@ -22,16 +22,17 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '22.18.0'
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
@@ -65,7 +66,7 @@ jobs:
|
||||
|
||||
- name: Comment PR with deployment URL
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/github-script@v7
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
github.rest.issues.createComment({
|
||||
|
||||
153
.github/workflows/main.yml
vendored
153
.github/workflows/main.yml
vendored
@@ -6,29 +6,15 @@ on:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
release-please:
|
||||
name: Release Please
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
# Only run release-please on pushes to main
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
|
||||
- id: release
|
||||
uses: googleapis/release-please-action@v4
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
outputs:
|
||||
releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
|
||||
tag_name: ${{ steps.release.outputs.tag_name || '' }}
|
||||
test-api:
|
||||
name: Test API
|
||||
defaults:
|
||||
@@ -38,36 +24,25 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
|
||||
version: 1.0
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
|
||||
version: 1.0
|
||||
|
||||
- name: PNPM Install
|
||||
run: pnpm install --frozen-lockfile
|
||||
@@ -191,29 +166,16 @@ jobs:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
@@ -244,7 +206,7 @@ jobs:
|
||||
id: buildnumber
|
||||
uses: onyxmueller/build-tag-number@v1
|
||||
with:
|
||||
token: ${{secrets.github_token}}
|
||||
token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
|
||||
prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}
|
||||
|
||||
- name: Build
|
||||
@@ -268,29 +230,16 @@ jobs:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
@@ -333,31 +282,17 @@ jobs:
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
cat .env
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
@@ -385,12 +320,34 @@ jobs:
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/.nuxt/standalone-apps
|
||||
path: web/dist
|
||||
|
||||
release-please:
|
||||
name: Release Please
|
||||
runs-on: ubuntu-latest
|
||||
# Only run on pushes to main AND after tests pass
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
needs:
|
||||
- test-api
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- id: release
|
||||
uses: googleapis/release-please-action@v4
|
||||
outputs:
|
||||
releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
|
||||
tag_name: ${{ steps.release.outputs.tag_name || '' }}
|
||||
|
||||
build-plugin-staging-pr:
|
||||
name: Build and Deploy Plugin
|
||||
needs:
|
||||
- release-please
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
@@ -414,9 +371,6 @@ jobs:
|
||||
needs:
|
||||
- release-please
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
- test-api
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: true
|
||||
@@ -430,3 +384,4 @@ jobs:
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
100
.github/workflows/push-staging-pr-on-close.yml
vendored
100
.github/workflows/push-staging-pr-on-close.yml
vendored
@@ -1,4 +1,9 @@
|
||||
name: Push Staging Plugin on PR Close
|
||||
name: Replace PR Plugin with Staging Redirect on Merge
|
||||
|
||||
# This workflow runs when a PR is merged and replaces the PR-specific plugin
|
||||
# with a redirect version that points to the main staging URL.
|
||||
# This ensures users who installed the PR version will automatically
|
||||
# update to the staging version on their next update check.
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
@@ -17,18 +22,13 @@ on:
|
||||
default: true
|
||||
|
||||
jobs:
|
||||
push-staging:
|
||||
push-staging-redirect:
|
||||
if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || (github.event_name == 'workflow_dispatch' && inputs.pr_merged == true)
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
steps:
|
||||
- name: Set Timezone
|
||||
uses: szenius/set-timezone@v2.0
|
||||
with:
|
||||
timezoneLinux: "America/Los_Angeles"
|
||||
|
||||
- name: Set PR number
|
||||
id: pr_number
|
||||
run: |
|
||||
@@ -45,11 +45,12 @@ jobs:
|
||||
name: unraid-plugin-.*
|
||||
path: connect-files
|
||||
pr: ${{ steps.pr_number.outputs.pr_number }}
|
||||
workflow: main.yml
|
||||
workflow_conclusion: success
|
||||
workflow_search: true
|
||||
search_artifacts: true
|
||||
if_no_artifact_found: fail
|
||||
|
||||
- name: Update Downloaded Staging Plugin to New Date
|
||||
- name: Update Downloaded Plugin to Redirect to Staging
|
||||
run: |
|
||||
# Find the .plg file in the downloaded artifact
|
||||
plgfile=$(find connect-files -name "*.plg" -type f | head -1)
|
||||
@@ -60,23 +61,82 @@ jobs:
|
||||
fi
|
||||
|
||||
echo "Found plugin file: $plgfile"
|
||||
version=$(date +"%Y.%m.%d.%H%M")
|
||||
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1
|
||||
|
||||
# Get current version and bump it with current timestamp
|
||||
current_version=$(grep '<!ENTITY version' "${plgfile}" | sed -E 's/.*"(.*)".*/\1/')
|
||||
echo "Current version: ${current_version}"
|
||||
|
||||
# Create new version with current timestamp (ensures it's newer)
|
||||
new_version=$(date +"%Y.%m.%d.%H%M")
|
||||
echo "New redirect version: ${new_version}"
|
||||
|
||||
# Update version to trigger update
|
||||
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${new_version}\2#g" "${plgfile}" || exit 1
|
||||
|
||||
# Change the plugin url to point to staging
|
||||
# Change the plugin url to point to staging - users will switch to staging on next update
|
||||
url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
|
||||
sed -i -E "s#(<!ENTITY plugin_url \").*?(\">)#\1${url}\2#g" "${plgfile}" || exit 1
|
||||
cat "${plgfile}"
|
||||
|
||||
echo "Modified plugin to redirect to: ${url}"
|
||||
echo "Version bumped from ${current_version} to ${new_version}"
|
||||
|
||||
mkdir -p pr-release
|
||||
mv "${plgfile}" pr-release/dynamix.unraid.net.plg
|
||||
|
||||
- name: Upload to Cloudflare
|
||||
uses: jakejarvis/s3-sync-action@v0.5.1
|
||||
- name: Clean up old PR artifacts from Cloudflare
|
||||
env:
|
||||
AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_REGION: "auto"
|
||||
SOURCE_DIR: pr-release
|
||||
DEST_DIR: unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
run: |
|
||||
# Delete all existing files in the PR directory first (txz, plg, etc.)
|
||||
aws s3 rm s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/ \
|
||||
--recursive \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }}
|
||||
|
||||
echo "✅ Cleaned up old PR artifacts"
|
||||
|
||||
- name: Upload PR Redirect Plugin to Cloudflare
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
run: |
|
||||
# Upload only the redirect plugin file
|
||||
aws s3 cp pr-release/dynamix.unraid.net.plg \
|
||||
s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }} \
|
||||
--content-encoding none \
|
||||
--acl public-read
|
||||
|
||||
echo "✅ Uploaded redirect plugin"
|
||||
|
||||
- name: Output redirect information
|
||||
run: |
|
||||
echo "✅ PR plugin replaced with staging redirect version"
|
||||
echo "PR URL remains: https://preview.dl.unraid.net/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg"
|
||||
echo "Redirects users to staging: https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
|
||||
echo "Users updating from this PR version will automatically switch to staging"
|
||||
|
||||
- name: Comment on PR about staging redirect
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v3
|
||||
with:
|
||||
comment-tag: pr-closed-staging
|
||||
mode: recreate
|
||||
message: |
|
||||
## 🔄 PR Merged - Plugin Redirected to Staging
|
||||
|
||||
This PR has been merged and the preview plugin has been updated to redirect to the staging version.
|
||||
|
||||
**For users testing this PR:**
|
||||
- Your plugin will automatically update to the staging version on the next update check
|
||||
- The staging version includes all merged changes from this PR
|
||||
- No manual intervention required
|
||||
|
||||
**Staging URL:**
|
||||
```
|
||||
https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg
|
||||
```
|
||||
|
||||
Thank you for testing! 🚀
|
||||
|
||||
26
.github/workflows/release-production.yml
vendored
26
.github/workflows/release-production.yml
vendored
@@ -28,16 +28,16 @@ jobs:
|
||||
with:
|
||||
latest: true
|
||||
prerelease: false
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: '22.18.0'
|
||||
node-version: 22.19.0
|
||||
- run: |
|
||||
cat << 'EOF' > release-notes.txt
|
||||
${{ steps.release-info.outputs.body }}
|
||||
EOF
|
||||
- run: npm install html-escaper@2 xml2js
|
||||
- name: Update Plugin Changelog
|
||||
uses: actions/github-script@v7
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
@@ -124,3 +124,23 @@ jobs:
|
||||
--no-guess-mime-type \
|
||||
--content-encoding none \
|
||||
--acl public-read
|
||||
|
||||
- name: Discord Webhook Notification
|
||||
uses: tsickert/discord-webhook@v7.0.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.PUBLIC_DISCORD_RELEASE_ENDPOINT }}
|
||||
username: "Unraid API Bot"
|
||||
avatar-url: "https://craftassets.unraid.net/uploads/logos/un-mark-gradient.png"
|
||||
embed-title: "🚀 Unraid API ${{ inputs.version }} Released!"
|
||||
embed-url: "https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }}"
|
||||
embed-description: |
|
||||
A new version of Unraid API has been released!
|
||||
|
||||
**Version:** `${{ inputs.version }}`
|
||||
**Release Page:** [View on GitHub](https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }})
|
||||
|
||||
**📋 Changelog:**
|
||||
${{ steps.release-info.outputs.body }}
|
||||
embed-color: 16734296
|
||||
embed-footer-text: "Unraid API • Automated Release"
|
||||
embed-timestamp: true
|
||||
|
||||
71
.github/workflows/test-libvirt.yml
vendored
71
.github/workflows/test-libvirt.yml
vendored
@@ -1,71 +0,0 @@
|
||||
name: Test Libvirt
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "libvirt/**"
|
||||
pull_request:
|
||||
paths:
|
||||
- "libvirt/**"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./libvirt
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13.7"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: libvirt-dev
|
||||
version: 1.0
|
||||
|
||||
- name: Set Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.15.0
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('libvirt/package.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: pnpm install
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: test
|
||||
run: pnpm run test
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -29,6 +29,10 @@ unraid-ui/node_modules/
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# Auto-generated type declarations for Nuxt UI
|
||||
auto-imports.d.ts
|
||||
components.d.ts
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
@@ -118,3 +122,4 @@ api/dev/Unraid.net/myservers.cfg
|
||||
|
||||
# local Mise settings
|
||||
.mise.toml
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
{".":"4.18.2"}
|
||||
{".":"4.22.2"}
|
||||
|
||||
@@ -76,4 +76,21 @@ body {
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* Font size overrides for SSO button component */
|
||||
unraid-sso-button {
|
||||
--text-xs: 0.75rem;
|
||||
--text-sm: 0.875rem;
|
||||
--text-base: 1rem;
|
||||
--text-lg: 1.125rem;
|
||||
--text-xl: 1.25rem;
|
||||
--text-2xl: 1.5rem;
|
||||
--text-3xl: 1.875rem;
|
||||
--text-4xl: 2.25rem;
|
||||
--text-5xl: 3rem;
|
||||
--text-6xl: 3.75rem;
|
||||
--text-7xl: 4.5rem;
|
||||
--text-8xl: 6rem;
|
||||
--text-9xl: 8rem;
|
||||
}
|
||||
@@ -2,9 +2,59 @@
|
||||
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
/* Override Tailwind v4 global styles to use webgui variables */
|
||||
--ui-bg: var(--background-color) !important;
|
||||
--ui-text: var(--text-color) !important;
|
||||
/* Nuxt UI Color System - Primary (Orange for Unraid) */
|
||||
--ui-color-primary-50: #fff7ed;
|
||||
--ui-color-primary-100: #ffedd5;
|
||||
--ui-color-primary-200: #fed7aa;
|
||||
--ui-color-primary-300: #fdba74;
|
||||
--ui-color-primary-400: #fb923c;
|
||||
--ui-color-primary-500: #ff8c2f;
|
||||
--ui-color-primary-600: #ea580c;
|
||||
--ui-color-primary-700: #c2410c;
|
||||
--ui-color-primary-800: #9a3412;
|
||||
--ui-color-primary-900: #7c2d12;
|
||||
--ui-color-primary-950: #431407;
|
||||
|
||||
/* Nuxt UI Color System - Neutral (True Gray) */
|
||||
--ui-color-neutral-50: #fafafa;
|
||||
--ui-color-neutral-100: #f5f5f5;
|
||||
--ui-color-neutral-200: #e5e5e5;
|
||||
--ui-color-neutral-300: #d4d4d4;
|
||||
--ui-color-neutral-400: #a3a3a3;
|
||||
--ui-color-neutral-500: #737373;
|
||||
--ui-color-neutral-600: #525252;
|
||||
--ui-color-neutral-700: #404040;
|
||||
--ui-color-neutral-800: #262626;
|
||||
--ui-color-neutral-900: #171717;
|
||||
--ui-color-neutral-950: #0a0a0a;
|
||||
|
||||
/* Nuxt UI Default color shades */
|
||||
--ui-primary: var(--ui-color-primary-500);
|
||||
--ui-secondary: var(--ui-color-neutral-500);
|
||||
|
||||
/* Nuxt UI Design Tokens - Text */
|
||||
--ui-text-dimmed: var(--ui-color-neutral-400);
|
||||
--ui-text-muted: var(--ui-color-neutral-500);
|
||||
--ui-text-toned: var(--ui-color-neutral-600);
|
||||
--ui-text: var(--ui-color-neutral-700);
|
||||
--ui-text-highlighted: var(--ui-color-neutral-900);
|
||||
--ui-text-inverted: white;
|
||||
|
||||
/* Nuxt UI Design Tokens - Background */
|
||||
--ui-bg: white;
|
||||
--ui-bg-muted: var(--ui-color-neutral-50);
|
||||
--ui-bg-elevated: var(--ui-color-neutral-100);
|
||||
--ui-bg-accented: var(--ui-color-neutral-200);
|
||||
--ui-bg-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Design Tokens - Border */
|
||||
--ui-border: var(--ui-color-neutral-200);
|
||||
--ui-border-muted: var(--ui-color-neutral-200);
|
||||
--ui-border-accented: var(--ui-color-neutral-300);
|
||||
--ui-border-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Radius */
|
||||
--ui-radius: 0.5rem;
|
||||
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
@@ -16,7 +66,7 @@
|
||||
--card-foreground: 0 0% 3.9%;
|
||||
--border: 0 0% 89.8%;
|
||||
--input: 0 0% 89.8%;
|
||||
--primary: 0 0% 9%;
|
||||
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 96.1%;
|
||||
--secondary-foreground: 0 0% 9%;
|
||||
@@ -24,7 +74,7 @@
|
||||
--accent-foreground: 0 0% 9%;
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 3.9%;
|
||||
--ring: 24 100% 50%; /* Orange ring to match primary */
|
||||
--chart-1: 12 76% 61%;
|
||||
--chart-2: 173 58% 39%;
|
||||
--chart-3: 197 37% 24%;
|
||||
@@ -34,9 +84,30 @@
|
||||
|
||||
/* Dark mode */
|
||||
.dark {
|
||||
/* Override Tailwind v4 global styles to use webgui variables */
|
||||
--ui-bg: var(--background-color) !important;
|
||||
--ui-text: var(--text-color) !important;
|
||||
/* Nuxt UI Default color shades - Dark mode */
|
||||
--ui-primary: var(--ui-color-primary-400);
|
||||
--ui-secondary: var(--ui-color-neutral-400);
|
||||
|
||||
/* Nuxt UI Design Tokens - Text (Dark) */
|
||||
--ui-text-dimmed: var(--ui-color-neutral-500);
|
||||
--ui-text-muted: var(--ui-color-neutral-400);
|
||||
--ui-text-toned: var(--ui-color-neutral-300);
|
||||
--ui-text: var(--ui-color-neutral-200);
|
||||
--ui-text-highlighted: white;
|
||||
--ui-text-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Design Tokens - Background (Dark) */
|
||||
--ui-bg: var(--ui-color-neutral-900);
|
||||
--ui-bg-muted: var(--ui-color-neutral-800);
|
||||
--ui-bg-elevated: var(--ui-color-neutral-800);
|
||||
--ui-bg-accented: var(--ui-color-neutral-700);
|
||||
--ui-bg-inverted: white;
|
||||
|
||||
/* Nuxt UI Design Tokens - Border (Dark) */
|
||||
--ui-border: var(--ui-color-neutral-800);
|
||||
--ui-border-muted: var(--ui-color-neutral-700);
|
||||
--ui-border-accented: var(--ui-color-neutral-700);
|
||||
--ui-border-inverted: white;
|
||||
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
@@ -48,15 +119,15 @@
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--ring: 24 100% 50%; /* Orange ring to match primary */
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* Tailwind Shared Styles - Single entry point for all shared CSS */
|
||||
@import './css-variables.css';
|
||||
@import './unraid-theme.css';
|
||||
@import './theme-variants.css';
|
||||
@import './base-utilities.css';
|
||||
@import './sonner.css';
|
||||
|
||||
@@ -1,699 +0,0 @@
|
||||
/**------------------------------------------------------------------------------------------------
|
||||
* SONNER.CSS
|
||||
* This is a copy of Sonner's `style.css` as of commit a5b77c2df08d5c05aa923170176168102855533d
|
||||
*
|
||||
* This was necessary because I couldn't find a simple way to include Sonner's styles in vite's
|
||||
* css build output. They wouldn't show up even though the toaster was included, and vue-sonner
|
||||
* currently doesn't export its stylesheet (it appears to be inlined, but styles weren't applied
|
||||
* to the unraid-toaster component for some reason).
|
||||
*------------------------------------------------------------------------------------------------**/
|
||||
:where(html[dir='ltr']),
|
||||
:where([data-sonner-toaster][dir='ltr']) {
|
||||
--toast-icon-margin-start: -3px;
|
||||
--toast-icon-margin-end: 4px;
|
||||
--toast-svg-margin-start: -1px;
|
||||
--toast-svg-margin-end: 0px;
|
||||
--toast-button-margin-start: auto;
|
||||
--toast-button-margin-end: 0;
|
||||
--toast-close-button-start: 0;
|
||||
--toast-close-button-end: unset;
|
||||
--toast-close-button-transform: translate(-35%, -35%);
|
||||
}
|
||||
|
||||
:where(html[dir='rtl']),
|
||||
:where([data-sonner-toaster][dir='rtl']) {
|
||||
--toast-icon-margin-start: 4px;
|
||||
--toast-icon-margin-end: -3px;
|
||||
--toast-svg-margin-start: 0px;
|
||||
--toast-svg-margin-end: -1px;
|
||||
--toast-button-margin-start: 0;
|
||||
--toast-button-margin-end: auto;
|
||||
--toast-close-button-start: unset;
|
||||
--toast-close-button-end: 0;
|
||||
--toast-close-button-transform: translate(35%, -35%);
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster]) {
|
||||
position: fixed;
|
||||
width: var(--width);
|
||||
font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Helvetica Neue, Arial,
|
||||
Noto Sans, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji;
|
||||
--gray1: hsl(0, 0%, 99%);
|
||||
--gray2: hsl(0, 0%, 97.3%);
|
||||
--gray3: hsl(0, 0%, 95.1%);
|
||||
--gray4: hsl(0, 0%, 93%);
|
||||
--gray5: hsl(0, 0%, 90.9%);
|
||||
--gray6: hsl(0, 0%, 88.7%);
|
||||
--gray7: hsl(0, 0%, 85.8%);
|
||||
--gray8: hsl(0, 0%, 78%);
|
||||
--gray9: hsl(0, 0%, 56.1%);
|
||||
--gray10: hsl(0, 0%, 52.3%);
|
||||
--gray11: hsl(0, 0%, 43.5%);
|
||||
--gray12: hsl(0, 0%, 9%);
|
||||
--border-radius: 8px;
|
||||
box-sizing: border-box;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
list-style: none;
|
||||
outline: none;
|
||||
z-index: 999999999;
|
||||
transition: transform 400ms ease;
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-lifted='true']) {
|
||||
transform: translateY(-10px);
|
||||
}
|
||||
|
||||
@media (hover: none) and (pointer: coarse) {
|
||||
:where([data-sonner-toaster][data-lifted='true']) {
|
||||
transform: none;
|
||||
}
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-x-position='right']) {
|
||||
right: max(var(--offset), env(safe-area-inset-right));
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-x-position='left']) {
|
||||
left: max(var(--offset), env(safe-area-inset-left));
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-x-position='center']) {
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-y-position='top']) {
|
||||
top: max(var(--offset), env(safe-area-inset-top));
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-y-position='bottom']) {
|
||||
bottom: max(var(--offset), env(safe-area-inset-bottom));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) {
|
||||
--y: translateY(100%);
|
||||
--lift-amount: calc(var(--lift) * var(--gap));
|
||||
z-index: var(--z-index);
|
||||
position: absolute;
|
||||
opacity: 0;
|
||||
transform: var(--y);
|
||||
filter: blur(0);
|
||||
/* https://stackoverflow.com/questions/48124372/pointermove-event-not-working-with-touch-why-not */
|
||||
touch-action: none;
|
||||
transition: transform 400ms, opacity 400ms, height 400ms, box-shadow 200ms;
|
||||
box-sizing: border-box;
|
||||
outline: none;
|
||||
overflow-wrap: anywhere;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-styled='true']) {
|
||||
padding: 16px;
|
||||
background: var(--normal-bg);
|
||||
border: 1px solid var(--normal-border);
|
||||
color: var(--normal-text);
|
||||
border-radius: var(--border-radius);
|
||||
box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1);
|
||||
width: var(--width);
|
||||
font-size: 13px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]:focus-visible) {
|
||||
box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='top']) {
|
||||
top: 0;
|
||||
--y: translateY(-100%);
|
||||
--lift: 1;
|
||||
--lift-amount: calc(1 * var(--gap));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='bottom']) {
|
||||
bottom: 0;
|
||||
--y: translateY(100%);
|
||||
--lift: -1;
|
||||
--lift-amount: calc(var(--lift) * var(--gap));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-description]) {
|
||||
font-weight: 400;
|
||||
line-height: 1.4;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-title]) {
|
||||
font-weight: 500;
|
||||
line-height: 1.5;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-icon]) {
|
||||
display: flex;
|
||||
height: 16px;
|
||||
width: 16px;
|
||||
position: relative;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
flex-shrink: 0;
|
||||
margin-left: var(--toast-icon-margin-start);
|
||||
margin-right: var(--toast-icon-margin-end);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-promise='true']) :where([data-icon]) > svg {
|
||||
opacity: 0;
|
||||
transform: scale(0.8);
|
||||
transform-origin: center;
|
||||
animation: sonner-fade-in 300ms ease forwards;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-icon]) > * {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-icon]) svg {
|
||||
margin-left: var(--toast-svg-margin-start);
|
||||
margin-right: var(--toast-svg-margin-end);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-content]) {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 2px;
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-styled='true'] [data-button] {
|
||||
border-radius: 4px;
|
||||
padding-left: 8px;
|
||||
padding-right: 8px;
|
||||
height: 24px;
|
||||
font-size: 12px;
|
||||
color: var(--normal-bg);
|
||||
background: var(--normal-text);
|
||||
margin-left: var(--toast-button-margin-start);
|
||||
margin-right: var(--toast-button-margin-end);
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
outline: none;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-shrink: 0;
|
||||
transition: opacity 400ms, box-shadow 200ms;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-button]):focus-visible {
|
||||
box-shadow: 0 0 0 2px rgba(0, 0, 0, 0.4);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-button]):first-of-type {
|
||||
margin-left: var(--toast-button-margin-start);
|
||||
margin-right: var(--toast-button-margin-end);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-cancel]) {
|
||||
color: var(--normal-text);
|
||||
background: rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-theme='dark']) :where([data-cancel]) {
|
||||
background: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
[data-sonner-toast] [data-close-button] {
|
||||
position: absolute;
|
||||
left: var(--toast-close-button-start);
|
||||
right: var(--toast-close-button-end);
|
||||
top: 0;
|
||||
height: 20px;
|
||||
width: 20px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
padding: 0;
|
||||
color: hsl(var(--foreground));
|
||||
border: 1px solid hsl(var(--border));
|
||||
transform: var(--toast-close-button-transform);
|
||||
border-radius: 50%;
|
||||
cursor: pointer;
|
||||
z-index: 1;
|
||||
transition: opacity 100ms, background 200ms, border-color 200ms;
|
||||
}
|
||||
|
||||
[data-sonner-toast] [data-close-button] {
|
||||
background: hsl(var(--background));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-close-button]):focus-visible {
|
||||
box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-disabled='true']) {
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
[data-sonner-toast]:hover [data-close-button]:hover {
|
||||
background: hsl(var(--muted));
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
/* Leave a ghost div to avoid setting hover to false when swiping out */
|
||||
:where([data-sonner-toast][data-swiping='true'])::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 100%;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='top'][data-swiping='true'])::before {
|
||||
/* y 50% needed to distribute height additional height evenly */
|
||||
bottom: 50%;
|
||||
transform: scaleY(3) translateY(50%);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='bottom'][data-swiping='true'])::before {
|
||||
/* y -50% needed to distribute height additional height evenly */
|
||||
top: 50%;
|
||||
transform: scaleY(3) translateY(-50%);
|
||||
}
|
||||
|
||||
/* Leave a ghost div to avoid setting hover to false when transitioning out */
|
||||
:where([data-sonner-toast][data-swiping='false'][data-removed='true'])::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
transform: scaleY(2);
|
||||
}
|
||||
|
||||
/* Needed to avoid setting hover to false when inbetween toasts */
|
||||
:where([data-sonner-toast])::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 0;
|
||||
height: calc(var(--gap) + 1px);
|
||||
bottom: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-mounted='true']) {
|
||||
--y: translateY(0);
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-expanded='false'][data-front='false']) {
|
||||
--scale: var(--toasts-before) * 0.05 + 1;
|
||||
--y: translateY(calc(var(--lift-amount) * var(--toasts-before))) scale(calc(-1 * var(--scale)));
|
||||
height: var(--front-toast-height);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) > * {
|
||||
transition: opacity 400ms;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-expanded='false'][data-front='false'][data-styled='true']) > * {
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-visible='false']) {
|
||||
opacity: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-mounted='true'][data-expanded='true']) {
|
||||
--y: translateY(calc(var(--lift) * var(--offset)));
|
||||
height: var(--initial-height);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='true'][data-swipe-out='false']) {
|
||||
--y: translateY(calc(var(--lift) * -100%));
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='true']) {
|
||||
--y: translateY(calc(var(--lift) * var(--offset) + var(--lift) * -100%));
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='false']) {
|
||||
--y: translateY(40%);
|
||||
opacity: 0;
|
||||
transition: transform 500ms, opacity 200ms;
|
||||
}
|
||||
|
||||
/* Bump up the height to make sure hover state doesn't get set to false */
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='false'])::before {
|
||||
height: calc(var(--initial-height) + 20%);
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-swiping='true'] {
|
||||
transform: var(--y) translateY(var(--swipe-amount, 0px));
|
||||
transition: none;
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-swiped='true'] {
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-swipe-out='true'][data-y-position='bottom'],
|
||||
[data-sonner-toast][data-swipe-out='true'][data-y-position='top'] {
|
||||
animation: swipe-out 200ms ease-out forwards;
|
||||
}
|
||||
|
||||
@keyframes swipe-out {
|
||||
from {
|
||||
transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount)));
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
to {
|
||||
transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount) + var(--lift) * -100%));
|
||||
opacity: 0;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 600px) {
|
||||
[data-sonner-toaster] {
|
||||
position: fixed;
|
||||
--mobile-offset: 16px;
|
||||
right: var(--mobile-offset);
|
||||
left: var(--mobile-offset);
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
[data-sonner-toaster][dir='rtl'] {
|
||||
left: calc(var(--mobile-offset) * -1);
|
||||
}
|
||||
|
||||
[data-sonner-toaster] [data-sonner-toast] {
|
||||
left: 0;
|
||||
right: 0;
|
||||
width: calc(100% - var(--mobile-offset) * 2);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-x-position='left'] {
|
||||
left: var(--mobile-offset);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-y-position='bottom'] {
|
||||
bottom: 20px;
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-y-position='top'] {
|
||||
top: 20px;
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-x-position='center'] {
|
||||
left: var(--mobile-offset);
|
||||
right: var(--mobile-offset);
|
||||
transform: none;
|
||||
}
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] {
|
||||
--normal-bg: hsl(var(--background));
|
||||
--normal-border: hsl(var(--border));
|
||||
--normal-text: hsl(var(--foreground));
|
||||
|
||||
--success-bg: hsl(var(--background));
|
||||
--success-border: hsl(var(--border));
|
||||
--success-text: hsl(140, 100%, 27%);
|
||||
|
||||
--info-bg: hsl(var(--background));
|
||||
--info-border: hsl(var(--border));
|
||||
--info-text: hsl(210, 92%, 45%);
|
||||
|
||||
--warning-bg: hsl(var(--background));
|
||||
--warning-border: hsl(var(--border));
|
||||
--warning-text: hsl(31, 92%, 45%);
|
||||
|
||||
--error-bg: hsl(var(--background));
|
||||
--error-border: hsl(var(--border));
|
||||
--error-text: hsl(360, 100%, 45%);
|
||||
|
||||
/* Old colors, preserved for reference
|
||||
--success-bg: hsl(143, 85%, 96%);
|
||||
--success-border: hsl(145, 92%, 91%);
|
||||
--success-text: hsl(140, 100%, 27%);
|
||||
|
||||
--info-bg: hsl(208, 100%, 97%);
|
||||
--info-border: hsl(221, 91%, 91%);
|
||||
--info-text: hsl(210, 92%, 45%);
|
||||
|
||||
--warning-bg: hsl(49, 100%, 97%);
|
||||
--warning-border: hsl(49, 91%, 91%);
|
||||
--warning-text: hsl(31, 92%, 45%);
|
||||
|
||||
--error-bg: hsl(359, 100%, 97%);
|
||||
--error-border: hsl(359, 100%, 94%);
|
||||
--error-text: hsl(360, 100%, 45%); */
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: hsl(0 0% 3.9%);
|
||||
--normal-border: hsl(0 0% 14.9%);
|
||||
--normal-text: hsl(0 0% 98%);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: hsl(0 0% 100%);
|
||||
--normal-border: hsl(0 0% 89.8%);
|
||||
--normal-text: hsl(0 0% 3.9%);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] {
|
||||
--normal-bg: hsl(var(--background));
|
||||
--normal-border: hsl(var(--border));
|
||||
--normal-text: hsl(var(--foreground));
|
||||
|
||||
--success-bg: hsl(var(--background));
|
||||
--success-border: hsl(var(--border));
|
||||
--success-text: hsl(150, 86%, 65%);
|
||||
|
||||
--info-bg: hsl(var(--background));
|
||||
--info-border: hsl(var(--border));
|
||||
--info-text: hsl(216, 87%, 65%);
|
||||
|
||||
--warning-bg: hsl(var(--background));
|
||||
--warning-border: hsl(var(--border));
|
||||
--warning-text: hsl(46, 87%, 65%);
|
||||
|
||||
--error-bg: hsl(var(--background));
|
||||
--error-border: hsl(var(--border));
|
||||
--error-text: hsl(358, 100%, 81%);
|
||||
|
||||
/* Old colors, preserved for reference
|
||||
--success-bg: hsl(150, 100%, 6%);
|
||||
--success-border: hsl(147, 100%, 12%);
|
||||
--success-text: hsl(150, 86%, 65%);
|
||||
|
||||
--info-bg: hsl(215, 100%, 6%);
|
||||
--info-border: hsl(223, 100%, 12%);
|
||||
--info-text: hsl(216, 87%, 65%);
|
||||
|
||||
--warning-bg: hsl(64, 100%, 6%);
|
||||
--warning-border: hsl(60, 100%, 12%);
|
||||
--warning-text: hsl(46, 87%, 65%);
|
||||
|
||||
--error-bg: hsl(358, 76%, 10%);
|
||||
--error-border: hsl(357, 89%, 16%);
|
||||
--error-text: hsl(358, 100%, 81%); */
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='success'] {
|
||||
background: var(--success-bg);
|
||||
border-color: var(--success-border);
|
||||
color: var(--success-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='success'] [data-close-button] {
|
||||
background: var(--success-bg);
|
||||
border-color: var(--success-border);
|
||||
color: var(--success-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='info'] {
|
||||
background: var(--info-bg);
|
||||
border-color: var(--info-border);
|
||||
color: var(--info-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='info'] [data-close-button] {
|
||||
background: var(--info-bg);
|
||||
border-color: var(--info-border);
|
||||
color: var(--info-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='warning'] {
|
||||
background: var(--warning-bg);
|
||||
border-color: var(--warning-border);
|
||||
color: var(--warning-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='warning'] [data-close-button] {
|
||||
background: var(--warning-bg);
|
||||
border-color: var(--warning-border);
|
||||
color: var(--warning-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='error'] {
|
||||
background: var(--error-bg);
|
||||
border-color: var(--error-border);
|
||||
color: var(--error-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='error'] [data-close-button] {
|
||||
background: var(--error-bg);
|
||||
border-color: var(--error-border);
|
||||
color: var(--error-text);
|
||||
}
|
||||
|
||||
.sonner-loading-wrapper {
|
||||
--size: 16px;
|
||||
height: var(--size);
|
||||
width: var(--size);
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
.sonner-loading-wrapper[data-visible='false'] {
|
||||
transform-origin: center;
|
||||
animation: sonner-fade-out 0.2s ease forwards;
|
||||
}
|
||||
|
||||
.sonner-spinner {
|
||||
position: relative;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
height: var(--size);
|
||||
width: var(--size);
|
||||
}
|
||||
|
||||
.sonner-loading-bar {
|
||||
animation: sonner-spin 1.2s linear infinite;
|
||||
background: hsl(var(--muted-foreground));
|
||||
border-radius: 6px;
|
||||
height: 8%;
|
||||
left: -10%;
|
||||
position: absolute;
|
||||
top: -3.9%;
|
||||
width: 24%;
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(1) {
|
||||
animation-delay: -1.2s;
|
||||
transform: rotate(0.0001deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(2) {
|
||||
animation-delay: -1.1s;
|
||||
transform: rotate(30deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(3) {
|
||||
animation-delay: -1s;
|
||||
transform: rotate(60deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(4) {
|
||||
animation-delay: -0.9s;
|
||||
transform: rotate(90deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(5) {
|
||||
animation-delay: -0.8s;
|
||||
transform: rotate(120deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(6) {
|
||||
animation-delay: -0.7s;
|
||||
transform: rotate(150deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(7) {
|
||||
animation-delay: -0.6s;
|
||||
transform: rotate(180deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(8) {
|
||||
animation-delay: -0.5s;
|
||||
transform: rotate(210deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(9) {
|
||||
animation-delay: -0.4s;
|
||||
transform: rotate(240deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(10) {
|
||||
animation-delay: -0.3s;
|
||||
transform: rotate(270deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(11) {
|
||||
animation-delay: -0.2s;
|
||||
transform: rotate(300deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(12) {
|
||||
animation-delay: -0.1s;
|
||||
transform: rotate(330deg) translate(146%);
|
||||
}
|
||||
|
||||
@keyframes sonner-fade-in {
|
||||
0% {
|
||||
opacity: 0;
|
||||
transform: scale(0.8);
|
||||
}
|
||||
100% {
|
||||
opacity: 1;
|
||||
transform: scale(1);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes sonner-fade-out {
|
||||
0% {
|
||||
opacity: 1;
|
||||
transform: scale(1);
|
||||
}
|
||||
100% {
|
||||
opacity: 0;
|
||||
transform: scale(0.8);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes sonner-spin {
|
||||
0% {
|
||||
opacity: 1;
|
||||
}
|
||||
100% {
|
||||
opacity: 0.15;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
[data-sonner-toast],
|
||||
[data-sonner-toast] > *,
|
||||
.sonner-loading-bar {
|
||||
transition: none !important;
|
||||
animation: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
.sonner-loader {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
transform-origin: center;
|
||||
transition: opacity 200ms, transform 200ms;
|
||||
}
|
||||
|
||||
.sonner-loader[data-visible='false'] {
|
||||
opacity: 0;
|
||||
transform: scale(0.8) translate(-50%, -50%);
|
||||
}
|
||||
92
@tailwind-shared/theme-variants.css
Normal file
92
@tailwind-shared/theme-variants.css
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
|
||||
* Tailwind v4 Theme Variants
|
||||
* Defines theme-specific CSS variables that can be switched via classes
|
||||
* These are applied dynamically based on the theme selected in GraphQL
|
||||
*/
|
||||
|
||||
/* Default/White Theme */
|
||||
:root,
|
||||
.theme-white {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #1c1b1b;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Black Theme */
|
||||
.theme-black,
|
||||
.theme-black.dark {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
--color-border: #e0e0e0;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #1c1b1b;
|
||||
--color-gamma-opaque: rgba(28, 27, 27, 0.3);
|
||||
}
|
||||
|
||||
/* Gray Theme */
|
||||
.theme-gray {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #383735;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Azure Theme */
|
||||
.theme-azure {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
--color-border: #5a8bb8;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #e7f2f8;
|
||||
--color-gamma: #336699;
|
||||
--color-gamma-opaque: rgba(51, 102, 153, 0.3);
|
||||
}
|
||||
|
||||
/* Dark Mode Overrides */
|
||||
.dark {
|
||||
--color-border: #383735;
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic color variables for user overrides from GraphQL
|
||||
* These are set via JavaScript and override the theme defaults
|
||||
* Using :root with class for higher specificity to override theme classes
|
||||
*/
|
||||
:root.has-custom-header-text {
|
||||
--header-text-primary: var(--custom-header-text-primary);
|
||||
--color-header-text-primary: var(--custom-header-text-primary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-meta {
|
||||
--header-text-secondary: var(--custom-header-text-secondary);
|
||||
--color-header-text-secondary: var(--custom-header-text-secondary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-bg {
|
||||
--header-background-color: var(--custom-header-background-color);
|
||||
--color-header-background: var(--custom-header-background-color);
|
||||
--header-gradient-start: var(--custom-header-gradient-start);
|
||||
--header-gradient-end: var(--custom-header-gradient-end);
|
||||
--color-header-gradient-start: var(--custom-header-gradient-start);
|
||||
--color-header-gradient-end: var(--custom-header-gradient-end);
|
||||
}
|
||||
@@ -84,23 +84,23 @@
|
||||
--color-primary-900: #7c2d12;
|
||||
--color-primary-950: #431407;
|
||||
|
||||
/* Header colors */
|
||||
--color-header-text-primary: var(--header-text-primary);
|
||||
--color-header-text-secondary: var(--header-text-secondary);
|
||||
--color-header-background-color: var(--header-background-color);
|
||||
/* Header colors - defaults will be overridden by theme */
|
||||
--color-header-text-primary: var(--header-text-primary, #1c1c1c);
|
||||
--color-header-text-secondary: var(--header-text-secondary, #999999);
|
||||
--color-header-background: var(--header-background-color, #f2f2f2);
|
||||
|
||||
/* Legacy colors */
|
||||
--color-alpha: var(--color-alpha);
|
||||
--color-beta: var(--color-beta);
|
||||
--color-gamma: var(--color-gamma);
|
||||
--color-gamma-opaque: var(--color-gamma-opaque);
|
||||
--color-customgradient-start: var(--color-customgradient-start);
|
||||
--color-customgradient-end: var(--color-customgradient-end);
|
||||
/* Legacy colors - defaults (overridden by theme-variants.css) */
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
--color-gamma-opaque: rgba(153, 153, 153, 0.5);
|
||||
--color-customgradient-start: rgba(242, 242, 242, 0);
|
||||
--color-customgradient-end: rgba(242, 242, 242, 0.85);
|
||||
|
||||
/* Gradients */
|
||||
--color-header-gradient-start: var(--header-gradient-start);
|
||||
--color-header-gradient-end: var(--header-gradient-end);
|
||||
--color-banner-gradient: var(--banner-gradient);
|
||||
/* Gradients - defaults (overridden by theme-variants.css) */
|
||||
--color-header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--color-header-gradient-end: rgba(242, 242, 242, 0.85);
|
||||
--color-banner-gradient: none;
|
||||
|
||||
/* Font sizes */
|
||||
--font-10px: 10px;
|
||||
@@ -167,6 +167,27 @@
|
||||
--max-width-800px: 800px;
|
||||
--max-width-1024px: 1024px;
|
||||
|
||||
/* Container sizes adjusted for 10px base font size (1.6x scale) */
|
||||
--container-xs: 32rem;
|
||||
--container-sm: 38.4rem;
|
||||
--container-md: 44.8rem;
|
||||
--container-lg: 51.2rem;
|
||||
--container-xl: 57.6rem;
|
||||
--container-2xl: 67.2rem;
|
||||
--container-3xl: 76.8rem;
|
||||
--container-4xl: 89.6rem;
|
||||
--container-5xl: 102.4rem;
|
||||
--container-6xl: 115.2rem;
|
||||
--container-7xl: 128rem;
|
||||
|
||||
/* Extended width scale for max-w-* utilities */
|
||||
--width-5xl: 102.4rem;
|
||||
--width-6xl: 115.2rem;
|
||||
--width-7xl: 128rem;
|
||||
--width-8xl: 140.8rem;
|
||||
--width-9xl: 153.6rem;
|
||||
--width-10xl: 166.4rem;
|
||||
|
||||
/* Animations */
|
||||
--animate-mark-2: mark-2 1.5s ease infinite;
|
||||
--animate-mark-3: mark-3 1.5s ease infinite;
|
||||
|
||||
@@ -7,7 +7,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
|
||||
This is the Unraid API monorepo containing multiple packages that provide API functionality for Unraid servers. It uses pnpm workspaces with the following structure:
|
||||
|
||||
- `/api` - Core NestJS API server with GraphQL
|
||||
- `/web` - Nuxt.js frontend application
|
||||
- `/web` - Vue 3 frontend application
|
||||
- `/unraid-ui` - Vue 3 component library
|
||||
- `/plugin` - Unraid plugin package (.plg)
|
||||
- `/packages` - Shared packages and API plugins
|
||||
@@ -128,9 +128,6 @@ Enables GraphQL playground at `http://tower.local/graphql`
|
||||
- **Use Mocks Correctly**: Mocks should be used as nouns, not verbs.
|
||||
|
||||
#### Vue Component Testing
|
||||
|
||||
- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
|
||||
- Nuxt is currently set to auto import so some vue files may need compute or ref imported
|
||||
- Use pnpm when running terminal commands and stay within the web directory
|
||||
- Tests are located under `web/__test__`, run with `pnpm test`
|
||||
- Use `mount` from Vue Test Utils for component testing
|
||||
|
||||
@@ -31,3 +31,4 @@ BYPASS_CORS_CHECKS=true
|
||||
CHOKIDAR_USEPOLLING=true
|
||||
LOG_TRANSPORT=console
|
||||
LOG_LEVEL=trace
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true
|
||||
|
||||
3
api/.gitignore
vendored
3
api/.gitignore
vendored
@@ -93,3 +93,6 @@ dev/local-session
|
||||
|
||||
# local OIDC config for testing - contains secrets
|
||||
dev/configs/oidc.local.json
|
||||
|
||||
# local api keys
|
||||
dev/keys/*
|
||||
|
||||
120
api/CHANGELOG.md
120
api/CHANGELOG.md
@@ -1,5 +1,125 @@
|
||||
# Changelog
|
||||
|
||||
## [4.22.2](https://github.com/unraid/api/compare/v4.22.1...v4.22.2) (2025-09-15)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **deps:** pin dependency conventional-changelog-conventionalcommits to 9.1.0 ([#1697](https://github.com/unraid/api/issues/1697)) ([9a86c61](https://github.com/unraid/api/commit/9a86c615da2e975f568922fa012cc29b3f9cde0e))
|
||||
* **deps:** update dependency filenamify to v7 ([#1703](https://github.com/unraid/api/issues/1703)) ([b80988a](https://github.com/unraid/api/commit/b80988aaabebc4b8dbf2bf31f0764bf2f28e1575))
|
||||
* **deps:** update graphqlcodegenerator monorepo (major) ([#1689](https://github.com/unraid/api/issues/1689)) ([ba4a43a](https://github.com/unraid/api/commit/ba4a43aec863fc30c47dd17370d74daed7f84703))
|
||||
* false positive on verify_install script being external shell ([#1704](https://github.com/unraid/api/issues/1704)) ([31a255c](https://github.com/unraid/api/commit/31a255c9281b29df983d0f5d0475cd5a69790a48))
|
||||
* improve vue mount speed by 10x ([c855caa](https://github.com/unraid/api/commit/c855caa9b2d4d63bead1a992f5c583e00b9ba843))
|
||||
|
||||
## [4.22.1](https://github.com/unraid/api/compare/v4.22.0...v4.22.1) (2025-09-12)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* set input color in SSO field rather than inside of the main.css ([01d353f](https://github.com/unraid/api/commit/01d353fa08a3df688b37a495a204605138f7f71d))
|
||||
|
||||
## [4.22.0](https://github.com/unraid/api/compare/v4.21.0...v4.22.0) (2025-09-12)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* improved update ui ([#1691](https://github.com/unraid/api/issues/1691)) ([a59b363](https://github.com/unraid/api/commit/a59b363ebc1e660f854c55d50fc02c823c2fd0cc))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **deps:** update dependency camelcase-keys to v10 ([#1687](https://github.com/unraid/api/issues/1687)) ([95faeaa](https://github.com/unraid/api/commit/95faeaa2f39bf7bd16502698d7530aaa590b286d))
|
||||
* **deps:** update dependency p-retry to v7 ([#1608](https://github.com/unraid/api/issues/1608)) ([c782cf0](https://github.com/unraid/api/commit/c782cf0e8710c6690050376feefda3edb30dd549))
|
||||
* **deps:** update dependency uuid to v13 ([#1688](https://github.com/unraid/api/issues/1688)) ([2fef10c](https://github.com/unraid/api/commit/2fef10c94aae910e95d9f5bcacf7289e2cca6ed9))
|
||||
* **deps:** update dependency vue-sonner to v2 ([#1475](https://github.com/unraid/api/issues/1475)) ([f95ca9c](https://github.com/unraid/api/commit/f95ca9c9cb69725dcf3bb4bcbd0b558a2074e311))
|
||||
* display settings fix for languages on less than 7.2-beta.2.3 ([#1696](https://github.com/unraid/api/issues/1696)) ([03dae7c](https://github.com/unraid/api/commit/03dae7ce66b3409593eeee90cd5b56e2a920ca44))
|
||||
* hide reset help option when sso is being checked ([#1695](https://github.com/unraid/api/issues/1695)) ([222ced7](https://github.com/unraid/api/commit/222ced7518d40c207198a3b8548f0e024bc865b0))
|
||||
* progressFrame white on black ([0990b89](https://github.com/unraid/api/commit/0990b898bd02c231153157c20d5142e5fd4513cd))
|
||||
|
||||
## [4.21.0](https://github.com/unraid/api/compare/v4.20.4...v4.21.0) (2025-09-10)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add zsh shell detection to install script ([#1539](https://github.com/unraid/api/issues/1539)) ([50ea2a3](https://github.com/unraid/api/commit/50ea2a3ffb82b30152fb85e0fb9b0d178d596efe))
|
||||
* **api:** determine if docker container has update ([#1582](https://github.com/unraid/api/issues/1582)) ([e57d81e](https://github.com/unraid/api/commit/e57d81e0735772758bb85e0b3c89dce15c56635e))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* white on white login text ([ae4d3ec](https://github.com/unraid/api/commit/ae4d3ecbc417454ae3c6e02018f8e4c49bbfc902))
|
||||
|
||||
## [4.20.4](https://github.com/unraid/api/compare/v4.20.3...v4.20.4) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* staging PR plugin fixes + UI issues on 7.2 beta ([b79b44e](https://github.com/unraid/api/commit/b79b44e95c65a124313814ab55b0d0a745a799c7))
|
||||
|
||||
## [4.20.3](https://github.com/unraid/api/compare/v4.20.2...v4.20.3) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* header background color issues fixed on 7.2 - thanks Nick! ([73c1100](https://github.com/unraid/api/commit/73c1100d0ba396fe4342f8ce7561017ab821e68b))
|
||||
|
||||
## [4.20.2](https://github.com/unraid/api/compare/v4.20.1...v4.20.2) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* trigger deployment ([a27453f](https://github.com/unraid/api/commit/a27453fda81e4eeb07f257e60516bebbbc27cf7a))
|
||||
|
||||
## [4.20.1](https://github.com/unraid/api/compare/v4.20.0...v4.20.1) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* adjust header styles to fix flashing and width issues - thanks ZarZ ([4759b3d](https://github.com/unraid/api/commit/4759b3d0b3fb6bc71636f75f807cd6f4f62305d1))
|
||||
|
||||
## [4.20.0](https://github.com/unraid/api/compare/v4.19.1...v4.20.0) (2025-09-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **disks:** add isSpinning field to Disk type ([#1527](https://github.com/unraid/api/issues/1527)) ([193be3d](https://github.com/unraid/api/commit/193be3df3672514be9904e3d4fbdff776470afc0))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* better component loading to prevent per-page strange behavior ([095c222](https://github.com/unraid/api/commit/095c2221c94f144f8ad410a69362b15803765531))
|
||||
* **deps:** pin dependencies ([#1669](https://github.com/unraid/api/issues/1669)) ([413db4b](https://github.com/unraid/api/commit/413db4bd30a06aa69d3ca86e793782854f822589))
|
||||
* **plugin:** add fallback for unraid-api stop in deprecation cleanup ([#1668](https://github.com/unraid/api/issues/1668)) ([797bf50](https://github.com/unraid/api/commit/797bf50ec702ebc8244ff71a8ef1a80ea5cd2169))
|
||||
* prepend 'v' to API version in workflow dispatch inputs ([f0cffbd](https://github.com/unraid/api/commit/f0cffbdc7ac36e7037ab60fe9dddbb2cab4a5e10))
|
||||
* progress frame background color fix ([#1672](https://github.com/unraid/api/issues/1672)) ([785f1f5](https://github.com/unraid/api/commit/785f1f5eb1a1cc8b41f6eb502e4092d149cfbd80))
|
||||
* properly override header values ([#1673](https://github.com/unraid/api/issues/1673)) ([aecf70f](https://github.com/unraid/api/commit/aecf70ffad60c83074347d3d6ec23f73acbd1aee))
|
||||
|
||||
## [4.19.1](https://github.com/unraid/api/compare/v4.19.0...v4.19.1) (2025-09-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* custom path detection to fix setup issues ([#1664](https://github.com/unraid/api/issues/1664)) ([2ecdb99](https://github.com/unraid/api/commit/2ecdb99052f39d89af21bbe7ad3f80b83bb1eaa9))
|
||||
|
||||
## [4.19.0](https://github.com/unraid/api/compare/v4.18.2...v4.19.0) (2025-09-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* mount vue apps, not web components ([#1639](https://github.com/unraid/api/issues/1639)) ([88087d5](https://github.com/unraid/api/commit/88087d5201992298cdafa791d5d1b5bb23dcd72b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* api version json response ([#1653](https://github.com/unraid/api/issues/1653)) ([292bc0f](https://github.com/unraid/api/commit/292bc0fc810a0d0f0cce6813b0631ff25099cc05))
|
||||
* enhance DOM validation and cleanup in vue-mount-app ([6cf7c88](https://github.com/unraid/api/commit/6cf7c88242f2f4fe9f83871560039767b5b90273))
|
||||
* enhance getKeyFile function to handle missing key file gracefully ([#1659](https://github.com/unraid/api/issues/1659)) ([728b38a](https://github.com/unraid/api/commit/728b38ac11faeacd39ce9d0157024ad140e29b36))
|
||||
* info alert docker icon ([#1661](https://github.com/unraid/api/issues/1661)) ([239cdd6](https://github.com/unraid/api/commit/239cdd6133690699348e61f68e485d2b54fdcbdb))
|
||||
* oidc cache busting issues fixed ([#1656](https://github.com/unraid/api/issues/1656)) ([e204eb8](https://github.com/unraid/api/commit/e204eb80a00ab9242e3dca4ccfc3e1b55a7694b7))
|
||||
* **plugin:** restore cleanup behavior for unsupported unraid versions ([#1658](https://github.com/unraid/api/issues/1658)) ([534a077](https://github.com/unraid/api/commit/534a07788b76de49e9ba14059a9aed0bf16e02ca))
|
||||
* UnraidToaster component and update dialog close button ([#1657](https://github.com/unraid/api/issues/1657)) ([44774d0](https://github.com/unraid/api/commit/44774d0acdd25aa33cb60a5d0b4f80777f4068e5))
|
||||
* vue mounting logic with tests ([#1651](https://github.com/unraid/api/issues/1651)) ([33774aa](https://github.com/unraid/api/commit/33774aa596124a031a7452b62ca4c43743a09951))
|
||||
|
||||
## [4.18.2](https://github.com/unraid/api/compare/v4.18.1...v4.18.2) (2025-09-03)
|
||||
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ const config: CodegenConfig = {
|
||||
URL: 'URL',
|
||||
Port: 'number',
|
||||
UUID: 'string',
|
||||
BigInt: 'number',
|
||||
},
|
||||
scalarSchemas: {
|
||||
URL: 'z.instanceof(URL)',
|
||||
@@ -24,6 +25,7 @@ const config: CodegenConfig = {
|
||||
JSON: 'z.record(z.string(), z.any())',
|
||||
Port: 'z.number()',
|
||||
UUID: 'z.string()',
|
||||
BigInt: 'z.number()',
|
||||
},
|
||||
},
|
||||
generates: {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": "4.18.1",
|
||||
"version": "4.22.2",
|
||||
"extraOrigins": [],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
|
||||
247
api/docs/developer/feature-flags.md
Normal file
247
api/docs/developer/feature-flags.md
Normal file
@@ -0,0 +1,247 @@
|
||||
# Feature Flags
|
||||
|
||||
Feature flags allow you to conditionally enable or disable functionality in the Unraid API. This is useful for gradually rolling out new features, A/B testing, or keeping experimental code behind flags during development.
|
||||
|
||||
## Setting Up Feature Flags
|
||||
|
||||
### 1. Define the Feature Flag
|
||||
|
||||
Feature flags are defined as environment variables and collected in `src/consts.ts`:
|
||||
|
||||
```typescript
|
||||
// src/environment.ts
|
||||
export const ENABLE_MY_NEW_FEATURE = process.env.ENABLE_MY_NEW_FEATURE === 'true';
|
||||
|
||||
// src/consts.ts
|
||||
export const FeatureFlags = Object.freeze({
|
||||
ENABLE_NEXT_DOCKER_RELEASE,
|
||||
ENABLE_MY_NEW_FEATURE, // Add your new flag here
|
||||
});
|
||||
```
|
||||
|
||||
### 2. Set the Environment Variable
|
||||
|
||||
Set the environment variable when running the API:
|
||||
|
||||
```bash
|
||||
ENABLE_MY_NEW_FEATURE=true unraid-api start
|
||||
```
|
||||
|
||||
Or add it to your `.env` file:
|
||||
|
||||
```env
|
||||
ENABLE_MY_NEW_FEATURE=true
|
||||
```
|
||||
|
||||
## Using Feature Flags in GraphQL
|
||||
|
||||
### Method 1: @UseFeatureFlag Decorator (Schema-Level)
|
||||
|
||||
The `@UseFeatureFlag` decorator conditionally includes or excludes GraphQL fields, queries, and mutations from the schema based on feature flags. When a feature flag is disabled, the field won't appear in the GraphQL schema at all.
|
||||
|
||||
```typescript
|
||||
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
|
||||
import { Query, Mutation, ResolveField } from '@nestjs/graphql';
|
||||
|
||||
@Resolver()
|
||||
export class MyResolver {
|
||||
|
||||
// Conditionally include a query
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@Query(() => String)
|
||||
async experimentalQuery() {
|
||||
return 'This query only exists when ENABLE_MY_NEW_FEATURE is true';
|
||||
}
|
||||
|
||||
// Conditionally include a mutation
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@Mutation(() => Boolean)
|
||||
async experimentalMutation() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Conditionally include a field resolver
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@ResolveField(() => String)
|
||||
async experimentalField() {
|
||||
return 'This field only exists when the flag is enabled';
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Clean schema - disabled features don't appear in GraphQL introspection
|
||||
- No runtime overhead for disabled features
|
||||
- Clear feature boundaries
|
||||
|
||||
**Use when:**
|
||||
- You want to completely hide features from the GraphQL schema
|
||||
- The feature is experimental or in beta
|
||||
- You're doing a gradual rollout
|
||||
|
||||
### Method 2: checkFeatureFlag Function (Runtime)
|
||||
|
||||
The `checkFeatureFlag` function provides runtime feature flag checking within resolver methods. It throws a `ForbiddenException` if the feature is disabled.
|
||||
|
||||
```typescript
|
||||
import { checkFeatureFlag } from '@app/unraid-api/utils/feature-flag.helper.js';
|
||||
import { FeatureFlags } from '@app/consts.js';
|
||||
import { Query, ResolveField } from '@nestjs/graphql';
|
||||
|
||||
@Resolver()
|
||||
export class MyResolver {
|
||||
|
||||
@Query(() => String)
|
||||
async myQuery(
|
||||
@Args('useNewAlgorithm', { nullable: true }) useNewAlgorithm?: boolean
|
||||
) {
|
||||
// Conditionally use new logic based on feature flag
|
||||
if (useNewAlgorithm) {
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
|
||||
return this.newAlgorithm();
|
||||
}
|
||||
|
||||
return this.oldAlgorithm();
|
||||
}
|
||||
|
||||
@ResolveField(() => String)
|
||||
async dataField() {
|
||||
// Check flag at the start of the method
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
|
||||
|
||||
// Feature-specific logic here
|
||||
return this.computeExperimentalData();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- More granular control within methods
|
||||
- Can conditionally execute parts of a method
|
||||
- Useful for A/B testing scenarios
|
||||
- Good for gradual migration strategies
|
||||
|
||||
**Use when:**
|
||||
- You need conditional logic within a method
|
||||
- The field should exist but behavior changes based on the flag
|
||||
- You're migrating from old to new implementation gradually
|
||||
|
||||
## Feature Flag Patterns
|
||||
|
||||
### Pattern 1: Complete Feature Toggle
|
||||
|
||||
Hide an entire feature behind a flag:
|
||||
|
||||
```typescript
|
||||
@UseFeatureFlag('ENABLE_DOCKER_TEMPLATES')
|
||||
@Resolver(() => DockerTemplate)
|
||||
export class DockerTemplateResolver {
|
||||
// All resolvers in this class are toggled by the flag
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 2: Gradual Migration
|
||||
|
||||
Migrate from old to new implementation:
|
||||
|
||||
```typescript
|
||||
@Query(() => [Container])
|
||||
async getContainers(@Args('version') version?: string) {
|
||||
if (version === 'v2') {
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_CONTAINERS_V2');
|
||||
return this.getContainersV2();
|
||||
}
|
||||
|
||||
return this.getContainersV1();
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 3: Beta Features
|
||||
|
||||
Mark features as beta:
|
||||
|
||||
```typescript
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@ResolveField(() => BetaMetrics, {
|
||||
description: 'BETA: Advanced metrics (requires ENABLE_BETA_FEATURES flag)'
|
||||
})
|
||||
async betaMetrics() {
|
||||
return this.computeBetaMetrics();
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 4: Performance Optimizations
|
||||
|
||||
Toggle expensive operations:
|
||||
|
||||
```typescript
|
||||
@ResolveField(() => Statistics)
|
||||
async statistics() {
|
||||
const basicStats = await this.getBasicStats();
|
||||
|
||||
try {
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_ADVANCED_ANALYTICS');
|
||||
const advancedStats = await this.getAdvancedStats();
|
||||
return { ...basicStats, ...advancedStats };
|
||||
} catch {
|
||||
// Feature disabled, return only basic stats
|
||||
return basicStats;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Testing with Feature Flags
|
||||
|
||||
When writing tests for feature-flagged code, create a mock to control feature flag values:
|
||||
|
||||
```typescript
|
||||
import { vi } from 'vitest';
|
||||
|
||||
// Mock the entire consts module
|
||||
vi.mock('@app/consts.js', async () => {
|
||||
const actual = await vi.importActual('@app/consts.js');
|
||||
return {
|
||||
...actual,
|
||||
FeatureFlags: {
|
||||
ENABLE_MY_NEW_FEATURE: true, // Set your test value
|
||||
ENABLE_NEXT_DOCKER_RELEASE: false,
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
describe('MyResolver', () => {
|
||||
it('should execute new logic when feature is enabled', async () => {
|
||||
// Test new behavior with mocked flag
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Naming Convention**: Use `ENABLE_` prefix for boolean feature flags
|
||||
2. **Environment Variables**: Always use uppercase with underscores
|
||||
3. **Documentation**: Document what each feature flag controls
|
||||
4. **Cleanup**: Remove feature flags once features are stable and fully rolled out
|
||||
5. **Default State**: New features should default to `false` (disabled)
|
||||
6. **Granularity**: Keep feature flags focused on a single feature or capability
|
||||
7. **Testing**: Always test both enabled and disabled states
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
- **Experimental Features**: Hide unstable features in production
|
||||
- **Gradual Rollouts**: Enable features for specific environments first
|
||||
- **A/B Testing**: Toggle between different implementations
|
||||
- **Performance**: Disable expensive operations when not needed
|
||||
- **Breaking Changes**: Provide migration path with both old and new behavior
|
||||
- **Debug Features**: Enable additional logging or debugging tools
|
||||
|
||||
## Checking Active Feature Flags
|
||||
|
||||
To see which feature flags are currently active:
|
||||
|
||||
```typescript
|
||||
// Log all feature flags on startup
|
||||
console.log('Active Feature Flags:', FeatureFlags);
|
||||
```
|
||||
|
||||
Or check via GraphQL introspection to see which fields are available based on current flags.
|
||||
@@ -139,6 +139,9 @@ type ArrayDisk implements Node {
|
||||
"""ata | nvme | usb | (others)"""
|
||||
transport: String
|
||||
color: ArrayDiskFsColor
|
||||
|
||||
"""Whether the disk is currently spinning"""
|
||||
isSpinning: Boolean
|
||||
}
|
||||
|
||||
interface Node {
|
||||
@@ -346,6 +349,9 @@ type Disk implements Node {
|
||||
|
||||
"""The partitions on the disk"""
|
||||
partitions: [DiskPartition!]!
|
||||
|
||||
"""Whether the disk is spinning or not"""
|
||||
isSpinning: Boolean!
|
||||
}
|
||||
|
||||
"""The type of interface the disk uses to connect to the system"""
|
||||
@@ -1044,6 +1050,19 @@ enum ThemeName {
|
||||
white
|
||||
}
|
||||
|
||||
type ExplicitStatusItem {
|
||||
name: String!
|
||||
updateStatus: UpdateStatus!
|
||||
}
|
||||
|
||||
"""Update status of a container."""
|
||||
enum UpdateStatus {
|
||||
UP_TO_DATE
|
||||
UPDATE_AVAILABLE
|
||||
REBUILD_READY
|
||||
UNKNOWN
|
||||
}
|
||||
|
||||
type ContainerPort {
|
||||
ip: String
|
||||
privatePort: Port
|
||||
@@ -1074,8 +1093,8 @@ type DockerContainer implements Node {
|
||||
created: Int!
|
||||
ports: [ContainerPort!]!
|
||||
|
||||
"""Total size of all the files in the container"""
|
||||
sizeRootFs: Int
|
||||
"""Total size of all files in the container (in bytes)"""
|
||||
sizeRootFs: BigInt
|
||||
labels: JSON
|
||||
state: ContainerState!
|
||||
status: String!
|
||||
@@ -1083,6 +1102,8 @@ type DockerContainer implements Node {
|
||||
networkSettings: JSON
|
||||
mounts: [JSON!]
|
||||
autoStart: Boolean!
|
||||
isUpdateAvailable: Boolean
|
||||
isRebuildReady: Boolean
|
||||
}
|
||||
|
||||
enum ContainerState {
|
||||
@@ -1113,6 +1134,7 @@ type Docker implements Node {
|
||||
containers(skipCache: Boolean! = false): [DockerContainer!]!
|
||||
networks(skipCache: Boolean! = false): [DockerNetwork!]!
|
||||
organizer: ResolvedOrganizerV1!
|
||||
containerUpdateStatuses: [ExplicitStatusItem!]!
|
||||
}
|
||||
|
||||
type ResolvedOrganizerView {
|
||||
@@ -1361,6 +1383,12 @@ type CpuLoad {
|
||||
|
||||
"""The percentage of time the CPU spent servicing hardware interrupts."""
|
||||
percentIrq: Float!
|
||||
|
||||
"""The percentage of time the CPU spent running virtual machines (guest)."""
|
||||
percentGuest: Float!
|
||||
|
||||
"""The percentage of CPU time stolen by the hypervisor."""
|
||||
percentSteal: Float!
|
||||
}
|
||||
|
||||
type CpuUtilization implements Node {
|
||||
@@ -2407,6 +2435,7 @@ type Mutation {
|
||||
setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
|
||||
deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
|
||||
moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
|
||||
refreshDockerDigests: Boolean!
|
||||
|
||||
"""Initiates a flash drive backup using a configured remote."""
|
||||
initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.18.2",
|
||||
"version": "4.22.2",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -56,7 +56,7 @@
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
"@fastify/helmet": "13.0.1",
|
||||
"@graphql-codegen/client-preset": "4.8.3",
|
||||
"@graphql-codegen/client-preset": "5.0.0",
|
||||
"@graphql-tools/load-files": "7.0.1",
|
||||
"@graphql-tools/merge": "9.1.1",
|
||||
"@graphql-tools/schema": "10.0.25",
|
||||
@@ -84,7 +84,7 @@
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.2.0",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"camelcase-keys": "10.0.0",
|
||||
"casbin": "5.38.0",
|
||||
"change-case": "5.4.4",
|
||||
"chokidar": "4.0.3",
|
||||
@@ -94,7 +94,7 @@
|
||||
"command-exists": "1.2.9",
|
||||
"convert": "5.12.0",
|
||||
"cookie": "1.0.2",
|
||||
"cron": "4.3.3",
|
||||
"cron": "4.3.0",
|
||||
"cross-fetch": "4.1.0",
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
@@ -103,7 +103,7 @@
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.5.0",
|
||||
"filenamify": "6.0.0",
|
||||
"filenamify": "7.0.0",
|
||||
"fs-extra": "11.3.1",
|
||||
"glob": "11.0.3",
|
||||
"global-agent": "3.0.0",
|
||||
@@ -127,7 +127,7 @@
|
||||
"node-cache": "5.1.2",
|
||||
"node-window-polyfill": "1.0.4",
|
||||
"openid-client": "6.6.4",
|
||||
"p-retry": "6.2.1",
|
||||
"p-retry": "7.0.0",
|
||||
"passport-custom": "1.1.1",
|
||||
"passport-http-header-strategy": "1.1.0",
|
||||
"path-type": "6.0.0",
|
||||
@@ -141,7 +141,7 @@
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.8",
|
||||
"undici": "7.15.0",
|
||||
"uuid": "11.1.0",
|
||||
"uuid": "13.0.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
"zod": "3.25.76"
|
||||
@@ -156,14 +156,14 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.34.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
"@graphql-codegen/add": "6.0.0",
|
||||
"@graphql-codegen/cli": "6.0.0",
|
||||
"@graphql-codegen/fragment-matcher": "6.0.0",
|
||||
"@graphql-codegen/import-types-preset": "3.0.1",
|
||||
"@graphql-codegen/typed-document-node": "5.1.2",
|
||||
"@graphql-codegen/typescript": "4.1.6",
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-codegen/typed-document-node": "6.0.0",
|
||||
"@graphql-codegen/typescript": "5.0.0",
|
||||
"@graphql-codegen/typescript-operations": "5.0.0",
|
||||
"@graphql-codegen/typescript-resolvers": "5.0.0",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.3",
|
||||
"@nestjs/testing": "11.1.6",
|
||||
@@ -205,7 +205,7 @@
|
||||
"rollup-plugin-node-externals": "8.1.0",
|
||||
"supertest": "7.1.4",
|
||||
"tsx": "4.20.5",
|
||||
"type-fest": "4.41.0",
|
||||
"type-fest": "5.0.0",
|
||||
"typescript": "5.9.2",
|
||||
"typescript-eslint": "8.41.0",
|
||||
"unplugin-swc": "1.5.7",
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { expect, test } from 'vitest';
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus, StateFileKey } from '@app/store/types.js';
|
||||
|
||||
import '@app/core/utils/misc/get-key-file.js';
|
||||
import '@app/store/modules/emhttp.js';
|
||||
|
||||
vi.mock('fs/promises');
|
||||
|
||||
test('Before loading key returns null', async () => {
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { status } = store.getState().registration;
|
||||
@@ -48,21 +49,70 @@ test('Returns empty key if key location is empty', async () => {
|
||||
await expect(getKeyFile()).resolves.toBe('');
|
||||
});
|
||||
|
||||
test(
|
||||
'Returns decoded key file if key location exists',
|
||||
async () => {
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
|
||||
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
await store.dispatch(loadRegistrationKey());
|
||||
// Check if store has state files loaded
|
||||
const { status } = store.getState().registration;
|
||||
expect(status).toBe(FileLoadStatus.LOADED);
|
||||
await expect(getKeyFile()).resolves.toMatchInlineSnapshot(
|
||||
'"hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w"'
|
||||
);
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
test('Returns empty string when key file does not exist (ENOENT)', async () => {
|
||||
const { readFile } = await import('fs/promises');
|
||||
|
||||
// Mock readFile to throw ENOENT error
|
||||
const readFileMock = vi.mocked(readFile);
|
||||
readFileMock.mockRejectedValueOnce(
|
||||
Object.assign(new Error('ENOENT: no such file or directory'), { code: 'ENOENT' })
|
||||
);
|
||||
|
||||
// Clear the module cache and re-import to get fresh module with mock
|
||||
vi.resetModules();
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { updateEmhttpState } = await import('@app/store/modules/emhttp.js');
|
||||
const { store: freshStore } = await import('@app/store/index.js');
|
||||
|
||||
// Set key file location to a non-existent file
|
||||
freshStore.dispatch(
|
||||
updateEmhttpState({
|
||||
field: StateFileKey.var,
|
||||
state: {
|
||||
regFile: '/boot/config/Pro.key',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
// Should return empty string when file doesn't exist
|
||||
await expect(getKeyFile()).resolves.toBe('');
|
||||
|
||||
// Clear mock
|
||||
readFileMock.mockReset();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
test('Returns decoded key file if key location exists', async () => {
|
||||
const { readFile } = await import('fs/promises');
|
||||
|
||||
// Mock a valid key file content
|
||||
const mockKeyContent =
|
||||
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w==';
|
||||
const binaryContent = Buffer.from(mockKeyContent, 'base64').toString('binary');
|
||||
|
||||
const readFileMock = vi.mocked(readFile);
|
||||
readFileMock.mockResolvedValue(binaryContent);
|
||||
|
||||
// Clear the module cache and re-import to get fresh module with mock
|
||||
vi.resetModules();
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
|
||||
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
|
||||
const { store: freshStore } = await import('@app/store/index.js');
|
||||
|
||||
// Load state files into store
|
||||
await freshStore.dispatch(loadStateFiles());
|
||||
await freshStore.dispatch(loadRegistrationKey());
|
||||
// Check if store has state files loaded
|
||||
const { status } = freshStore.getState().registration;
|
||||
expect(status).toBe(FileLoadStatus.LOADED);
|
||||
|
||||
const result = await getKeyFile();
|
||||
expect(result).toBe(
|
||||
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w'
|
||||
);
|
||||
|
||||
// Clear mock
|
||||
readFileMock.mockReset();
|
||||
vi.resetModules();
|
||||
}, 10000);
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { existsSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import pm2 from 'pm2';
|
||||
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';
|
||||
|
||||
@@ -17,11 +18,6 @@ const TEST_PROCESS_NAME = 'test-unraid-api';
|
||||
// Shared PM2 connection state
|
||||
let pm2Connected = false;
|
||||
|
||||
// Helper function to run CLI command (assumes CLI is built)
|
||||
async function runCliCommand(command: string, options: any = {}) {
|
||||
return await execa('node', [CLI_PATH, command], options);
|
||||
}
|
||||
|
||||
// Helper to ensure PM2 connection is established
|
||||
async function ensurePM2Connection() {
|
||||
if (pm2Connected) return;
|
||||
@@ -57,7 +53,7 @@ async function deleteTestProcesses() {
|
||||
}
|
||||
|
||||
const processName = processNames[deletedCount];
|
||||
pm2.delete(processName, (deleteErr) => {
|
||||
pm2.delete(processName, () => {
|
||||
// Ignore errors, process might not exist
|
||||
deletedCount++;
|
||||
deleteNext();
|
||||
@@ -92,7 +88,7 @@ async function cleanupAllPM2Processes() {
|
||||
}
|
||||
|
||||
// Kill the daemon to ensure fresh state
|
||||
pm2.killDaemon((killErr) => {
|
||||
pm2.killDaemon(() => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
// Small delay to let PM2 fully shutdown
|
||||
@@ -104,6 +100,9 @@ async function cleanupAllPM2Processes() {
|
||||
|
||||
describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
beforeAll(async () => {
|
||||
// Set PM2_HOME to use home directory for testing (not /var/log)
|
||||
process.env.PM2_HOME = join(homedir(), '.pm2');
|
||||
|
||||
// Build the CLI if it doesn't exist (only for CLI tests)
|
||||
if (!existsSync(CLI_PATH)) {
|
||||
console.log('Building CLI for integration tests...');
|
||||
@@ -198,6 +197,13 @@ describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
}, 30000);
|
||||
|
||||
it('should handle PM2 connection errors gracefully', async () => {
|
||||
// Disconnect PM2 first to ensure we're testing fresh connection
|
||||
await new Promise<void>((resolve) => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
setTimeout(resolve, 100);
|
||||
});
|
||||
|
||||
// Set an invalid PM2_HOME to force connection failure
|
||||
const originalPM2Home = process.env.PM2_HOME;
|
||||
process.env.PM2_HOME = '/invalid/path/that/does/not/exist';
|
||||
|
||||
@@ -12,7 +12,22 @@ import {
|
||||
UpdateRCloneRemoteDto,
|
||||
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
|
||||
|
||||
vi.mock('got');
|
||||
vi.mock('got', () => {
|
||||
const mockPost = vi.fn();
|
||||
const gotMock = {
|
||||
post: mockPost,
|
||||
};
|
||||
return {
|
||||
default: gotMock,
|
||||
HTTPError: class HTTPError extends Error {
|
||||
response?: any;
|
||||
constructor(response?: any) {
|
||||
super('HTTP Error');
|
||||
this.response = response;
|
||||
}
|
||||
},
|
||||
};
|
||||
});
|
||||
vi.mock('execa');
|
||||
vi.mock('p-retry');
|
||||
vi.mock('node:fs', () => ({
|
||||
@@ -60,7 +75,7 @@ vi.mock('@nestjs/common', async (importOriginal) => {
|
||||
|
||||
describe('RCloneApiService', () => {
|
||||
let service: RCloneApiService;
|
||||
let mockGot: any;
|
||||
let mockGotPost: any;
|
||||
let mockExeca: any;
|
||||
let mockPRetry: any;
|
||||
let mockExistsSync: any;
|
||||
@@ -68,19 +83,19 @@ describe('RCloneApiService', () => {
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
const { default: got } = await import('got');
|
||||
const got = await import('got');
|
||||
const { execa } = await import('execa');
|
||||
const pRetry = await import('p-retry');
|
||||
const { existsSync } = await import('node:fs');
|
||||
const { fileExists } = await import('@app/core/utils/files/file-exists.js');
|
||||
|
||||
mockGot = vi.mocked(got);
|
||||
mockGotPost = vi.mocked(got.default.post);
|
||||
mockExeca = vi.mocked(execa);
|
||||
mockPRetry = vi.mocked(pRetry.default);
|
||||
mockExistsSync = vi.mocked(existsSync);
|
||||
|
||||
// Mock successful RClone API response for socket check
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
|
||||
mockGotPost.mockResolvedValue({ body: { pid: 12345 } });
|
||||
|
||||
// Mock RClone binary exists check
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
@@ -97,10 +112,10 @@ describe('RCloneApiService', () => {
|
||||
mockPRetry.mockResolvedValue(undefined);
|
||||
|
||||
service = new RCloneApiService();
|
||||
await service.onModuleInit();
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
// Reset the mock after initialization to prepare for test-specific responses
|
||||
mockGot.post.mockClear();
|
||||
mockGotPost.mockClear();
|
||||
});
|
||||
|
||||
describe('getProviders', () => {
|
||||
@@ -109,15 +124,15 @@ describe('RCloneApiService', () => {
|
||||
{ name: 'aws', prefix: 's3', description: 'Amazon S3' },
|
||||
{ name: 'google', prefix: 'drive', description: 'Google Drive' },
|
||||
];
|
||||
mockGot.post.mockResolvedValue({
|
||||
mockGotPost.mockResolvedValue({
|
||||
body: { providers: mockProviders },
|
||||
});
|
||||
|
||||
const result = await service.getProviders();
|
||||
|
||||
expect(result).toEqual(mockProviders);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/providers',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/providers$/),
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
@@ -130,7 +145,7 @@ describe('RCloneApiService', () => {
|
||||
});
|
||||
|
||||
it('should return empty array when no providers', async () => {
|
||||
mockGot.post.mockResolvedValue({ body: {} });
|
||||
mockGotPost.mockResolvedValue({ body: {} });
|
||||
|
||||
const result = await service.getProviders();
|
||||
|
||||
@@ -141,15 +156,15 @@ describe('RCloneApiService', () => {
|
||||
describe('listRemotes', () => {
|
||||
it('should return list of remotes', async () => {
|
||||
const mockRemotes = ['backup-s3', 'drive-storage'];
|
||||
mockGot.post.mockResolvedValue({
|
||||
mockGotPost.mockResolvedValue({
|
||||
body: { remotes: mockRemotes },
|
||||
});
|
||||
|
||||
const result = await service.listRemotes();
|
||||
|
||||
expect(result).toEqual(mockRemotes);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/listremotes',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/listremotes$/),
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
@@ -162,7 +177,7 @@ describe('RCloneApiService', () => {
|
||||
});
|
||||
|
||||
it('should return empty array when no remotes', async () => {
|
||||
mockGot.post.mockResolvedValue({ body: {} });
|
||||
mockGotPost.mockResolvedValue({ body: {} });
|
||||
|
||||
const result = await service.listRemotes();
|
||||
|
||||
@@ -174,13 +189,13 @@ describe('RCloneApiService', () => {
|
||||
it('should return remote details', async () => {
|
||||
const input: GetRCloneRemoteDetailsDto = { name: 'test-remote' };
|
||||
const mockConfig = { type: 's3', provider: 'AWS' };
|
||||
mockGot.post.mockResolvedValue({ body: mockConfig });
|
||||
mockGotPost.mockResolvedValue({ body: mockConfig });
|
||||
|
||||
const result = await service.getRemoteDetails(input);
|
||||
|
||||
expect(result).toEqual(mockConfig);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/get',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/get$/),
|
||||
expect.objectContaining({
|
||||
json: { name: 'test-remote' },
|
||||
responseType: 'json',
|
||||
@@ -197,7 +212,7 @@ describe('RCloneApiService', () => {
|
||||
it('should return remote configuration', async () => {
|
||||
const input: GetRCloneRemoteConfigDto = { name: 'test-remote' };
|
||||
const mockConfig = { type: 's3', access_key_id: 'AKIA...' };
|
||||
mockGot.post.mockResolvedValue({ body: mockConfig });
|
||||
mockGotPost.mockResolvedValue({ body: mockConfig });
|
||||
|
||||
const result = await service.getRemoteConfig(input);
|
||||
|
||||
@@ -213,13 +228,13 @@ describe('RCloneApiService', () => {
|
||||
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
|
||||
};
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.createRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/create',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/create$/),
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
name: 'new-remote',
|
||||
@@ -243,13 +258,13 @@ describe('RCloneApiService', () => {
|
||||
parameters: { access_key_id: 'NEW_AKIA...' },
|
||||
};
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.updateRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/update',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/update$/),
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
name: 'existing-remote',
|
||||
@@ -269,13 +284,13 @@ describe('RCloneApiService', () => {
|
||||
it('should delete a remote', async () => {
|
||||
const input: DeleteRCloneRemoteDto = { name: 'remote-to-delete' };
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.deleteRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/delete',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/delete$/),
|
||||
expect.objectContaining({
|
||||
json: { name: 'remote-to-delete' },
|
||||
responseType: 'json',
|
||||
@@ -296,13 +311,13 @@ describe('RCloneApiService', () => {
|
||||
options: { delete_on: 'dst' },
|
||||
};
|
||||
const mockResponse = { jobid: 'job-123' };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.startBackup(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/sync/copy',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/sync\/copy$/),
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
srcFs: '/source/path',
|
||||
@@ -323,13 +338,13 @@ describe('RCloneApiService', () => {
|
||||
it('should return job status', async () => {
|
||||
const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
|
||||
const mockStatus = { status: 'running', progress: 0.5 };
|
||||
mockGot.post.mockResolvedValue({ body: mockStatus });
|
||||
mockGotPost.mockResolvedValue({ body: mockStatus });
|
||||
|
||||
const result = await service.getJobStatus(input);
|
||||
|
||||
expect(result).toEqual(mockStatus);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/job/status',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/job\/status$/),
|
||||
expect.objectContaining({
|
||||
json: { jobid: 'job-123' },
|
||||
responseType: 'json',
|
||||
@@ -348,13 +363,13 @@ describe('RCloneApiService', () => {
|
||||
{ id: 'job-1', status: 'running' },
|
||||
{ id: 'job-2', status: 'finished' },
|
||||
];
|
||||
mockGot.post.mockResolvedValue({ body: mockJobs });
|
||||
mockGotPost.mockResolvedValue({ body: mockJobs });
|
||||
|
||||
const result = await service.listRunningJobs();
|
||||
|
||||
expect(result).toEqual(mockJobs);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/job/list',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/job\/list$/),
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
@@ -378,7 +393,7 @@ describe('RCloneApiService', () => {
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
mockGotPost.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 500): Rclone Error: Internal server error'
|
||||
@@ -395,7 +410,7 @@ describe('RCloneApiService', () => {
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
mockGotPost.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
|
||||
@@ -412,7 +427,7 @@ describe('RCloneApiService', () => {
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
mockGotPost.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
|
||||
@@ -421,17 +436,108 @@ describe('RCloneApiService', () => {
|
||||
|
||||
it('should handle non-HTTP errors', async () => {
|
||||
const networkError = new Error('Network connection failed');
|
||||
mockGot.post.mockRejectedValue(networkError);
|
||||
mockGotPost.mockRejectedValue(networkError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow('Network connection failed');
|
||||
});
|
||||
|
||||
it('should handle unknown errors', async () => {
|
||||
mockGot.post.mockRejectedValue('unknown error');
|
||||
mockGotPost.mockRejectedValue('unknown error');
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Unknown error calling RClone API (config/providers) with params {}: unknown error'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkRcloneBinaryExists', () => {
|
||||
beforeEach(() => {
|
||||
// Create a new service instance without initializing for these tests
|
||||
service = new RCloneApiService();
|
||||
});
|
||||
|
||||
it('should return true when rclone version is 1.70.0', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.70.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when rclone version is newer than 1.70.0', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.75.2\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when rclone version is older than 1.70.0', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.69.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when rclone version is much older', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.50.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when version cannot be parsed', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone unknown version format',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when rclone binary is not found', async () => {
|
||||
const error = new Error('Command not found') as any;
|
||||
error.code = 'ENOENT';
|
||||
mockExeca.mockRejectedValueOnce(error);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false and log error for other exceptions', async () => {
|
||||
mockExeca.mockRejectedValueOnce(new Error('Some other error'));
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle beta/rc versions correctly', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.70.0-beta.1\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -211,6 +211,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": null,
|
||||
"id": "ST18000NM000J-2TV103_ZR585CPY",
|
||||
"idx": 0,
|
||||
"isSpinning": true,
|
||||
"name": "parity",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -235,6 +236,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 4116003021,
|
||||
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
|
||||
"idx": 1,
|
||||
"isSpinning": true,
|
||||
"name": "disk1",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -259,6 +261,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 11904860828,
|
||||
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
|
||||
"idx": 2,
|
||||
"isSpinning": true,
|
||||
"name": "disk2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -283,6 +286,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 6478056481,
|
||||
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
|
||||
"idx": 3,
|
||||
"isSpinning": true,
|
||||
"name": "disk3",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -307,6 +311,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 137273827,
|
||||
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
|
||||
"idx": 30,
|
||||
"isSpinning": true,
|
||||
"name": "cache",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -331,6 +336,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": null,
|
||||
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
|
||||
"idx": 31,
|
||||
"isSpinning": true,
|
||||
"name": "cache2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -355,6 +361,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 851325,
|
||||
"id": "Cruzer",
|
||||
"idx": 32,
|
||||
"isSpinning": true,
|
||||
"name": "flash",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
|
||||
@@ -28,6 +28,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": null,
|
||||
"id": "ST18000NM000J-2TV103_ZR585CPY",
|
||||
"idx": 0,
|
||||
"isSpinning": true,
|
||||
"name": "parity",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -52,6 +53,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 4116003021,
|
||||
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
|
||||
"idx": 1,
|
||||
"isSpinning": true,
|
||||
"name": "disk1",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -76,6 +78,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 11904860828,
|
||||
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
|
||||
"idx": 2,
|
||||
"isSpinning": true,
|
||||
"name": "disk2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -100,6 +103,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 6478056481,
|
||||
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
|
||||
"idx": 3,
|
||||
"isSpinning": true,
|
||||
"name": "disk3",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -124,6 +128,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 137273827,
|
||||
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
|
||||
"idx": 30,
|
||||
"isSpinning": true,
|
||||
"name": "cache",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -148,6 +153,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": null,
|
||||
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
|
||||
"idx": 31,
|
||||
"isSpinning": true,
|
||||
"name": "cache2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -172,6 +178,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 851325,
|
||||
"id": "Cruzer",
|
||||
"idx": 32,
|
||||
"isSpinning": true,
|
||||
"name": "flash",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
|
||||
@@ -2,7 +2,7 @@ import { join } from 'path';
|
||||
|
||||
import type { JSONWebKeySet } from 'jose';
|
||||
|
||||
import { PORT } from '@app/environment.js';
|
||||
import { ENABLE_NEXT_DOCKER_RELEASE, PORT } from '@app/environment.js';
|
||||
|
||||
export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
|
||||
const envPort = PORT;
|
||||
@@ -79,3 +79,14 @@ export const KEYSERVER_VALIDATION_ENDPOINT = 'https://keys.lime-technology.com/v
|
||||
|
||||
/** Set the max retries for the GraphQL Client */
|
||||
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;
|
||||
|
||||
/**
|
||||
* Feature flags are used to conditionally enable or disable functionality in the Unraid API.
|
||||
*
|
||||
* Keys are human readable feature flag names -- will be used to construct error messages.
|
||||
*
|
||||
* Values are boolean/truthy values.
|
||||
*/
|
||||
export const FeatureFlags = Object.freeze({
|
||||
ENABLE_NEXT_DOCKER_RELEASE,
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import pino from 'pino';
|
||||
import pretty from 'pino-pretty';
|
||||
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
|
||||
export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;
|
||||
|
||||
@@ -17,30 +17,27 @@ const nullDestination = pino.destination({
|
||||
|
||||
export const logDestination =
|
||||
process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
|
||||
const localFileDestination = pino.destination({
|
||||
dest: PATHS_LOGS_FILE,
|
||||
sync: true,
|
||||
});
|
||||
|
||||
// Since PM2 captures stdout and writes to the log file, we should not colorize stdout
|
||||
// to avoid ANSI escape codes in the log file
|
||||
const stream = SUPPRESS_LOGS
|
||||
? nullDestination
|
||||
: LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
colorizeObjects: true,
|
||||
colorize: false, // No colors since PM2 writes stdout to file
|
||||
colorizeObjects: false,
|
||||
levelFirst: false,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
translateTime: 'HH:mm:ss',
|
||||
customPrettifiers: {
|
||||
time: (timestamp: string | object) => `[${timestamp}`,
|
||||
level: (logLevel: string | object, key: string, log: any, extras: any) => {
|
||||
// Use labelColorized which preserves the colors
|
||||
const { labelColorized } = extras;
|
||||
level: (_logLevel: string | object, _key: string, log: any, extras: any) => {
|
||||
// Use label instead of labelColorized for non-colored output
|
||||
const { label } = extras;
|
||||
const context = log.context || log.logger || 'app';
|
||||
return `${labelColorized} ${context}]`;
|
||||
return `${label} ${context}]`;
|
||||
},
|
||||
},
|
||||
messageFormat: (log: any, messageKey: string) => {
|
||||
@@ -98,7 +95,7 @@ export const keyServerLogger = logger.child({ logger: 'key-server' });
|
||||
export const remoteAccessLogger = logger.child({ logger: 'remote-access' });
|
||||
export const remoteQueryLogger = logger.child({ logger: 'remote-query' });
|
||||
export const apiLogger = logger.child({ logger: 'api' });
|
||||
export const pluginLogger = logger.child({ logger: 'plugin', stream: localFileDestination });
|
||||
export const pluginLogger = logger.child({ logger: 'plugin' });
|
||||
|
||||
export const loggers = [
|
||||
internalLogger,
|
||||
|
||||
@@ -16,11 +16,22 @@ export const getKeyFile = async function (appStore: RootState = store.getState()
|
||||
|
||||
const keyFileName = basename(emhttp.var?.regFile);
|
||||
const registrationKeyFilePath = join(paths['keyfile-base'], keyFileName);
|
||||
const keyFile = await readFile(registrationKeyFilePath, 'binary');
|
||||
return Buffer.from(keyFile, 'binary')
|
||||
.toString('base64')
|
||||
.trim()
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=/g, '');
|
||||
|
||||
try {
|
||||
const keyFile = await readFile(registrationKeyFilePath, 'binary');
|
||||
return Buffer.from(keyFile, 'binary')
|
||||
.toString('base64')
|
||||
.trim()
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=/g, '');
|
||||
} catch (error) {
|
||||
// Handle ENOENT error when Pro.key file doesn't exist
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
// Return empty string when key file is missing (ENOKEYFILE state)
|
||||
return '';
|
||||
}
|
||||
// Re-throw other errors
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
// Non-function exports from this module are loaded into the NestJS Config at runtime.
|
||||
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
@@ -99,7 +98,7 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
? 'https://staging.mothership.unraid.net/ws'
|
||||
: 'https://mothership.unraid.net/ws';
|
||||
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? '/var/log/.pm2';
|
||||
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
|
||||
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
|
||||
export const PATHS_LOGS_DIR =
|
||||
@@ -111,3 +110,6 @@ export const PATHS_CONFIG_MODULES =
|
||||
|
||||
export const PATHS_LOCAL_SESSION_FILE =
|
||||
process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';
|
||||
|
||||
/** feature flag for the upcoming docker release */
|
||||
export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';
|
||||
|
||||
@@ -36,6 +36,7 @@ export type IniSlot = {
|
||||
size: string;
|
||||
sizeSb: string;
|
||||
slots: string;
|
||||
spundown: string;
|
||||
status: SlotStatus;
|
||||
temp: string;
|
||||
type: SlotType;
|
||||
@@ -82,6 +83,7 @@ export const parse: StateFileToIniParserMap['disks'] = (disksIni) =>
|
||||
fsType: slot.fsType ?? null,
|
||||
format: slot.format === '-' ? null : slot.format,
|
||||
transport: slot.transport ?? null,
|
||||
isSpinning: slot.spundown ? slot.spundown === '0' : null,
|
||||
};
|
||||
// @TODO Zod Parse This
|
||||
return result;
|
||||
|
||||
@@ -14,6 +14,7 @@ import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
|
||||
import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
|
||||
import { LegacyConfigModule } from '@app/unraid-api/config/legacy-config.module.js';
|
||||
import { CronModule } from '@app/unraid-api/cron/cron.module.js';
|
||||
import { JobModule } from '@app/unraid-api/cron/job.module.js';
|
||||
import { GraphModule } from '@app/unraid-api/graph/graph.module.js';
|
||||
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
|
||||
import { RestModule } from '@app/unraid-api/rest/rest.module.js';
|
||||
@@ -24,7 +25,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
GlobalDepsModule,
|
||||
LegacyConfigModule,
|
||||
PubSubModule,
|
||||
ScheduleModule.forRoot(),
|
||||
JobModule,
|
||||
LoggerModule.forRoot({
|
||||
pinoHttp: {
|
||||
logger: apiLogger,
|
||||
|
||||
111
api/src/unraid-api/cli/__test__/version.command.test.ts
Normal file
111
api/src/unraid-api/cli/__test__/version.command.test.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, MockInstance, vi } from 'vitest';
|
||||
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { VersionCommand } from '@app/unraid-api/cli/version.command.js';
|
||||
|
||||
let API_VERSION_MOCK = '4.18.2+build123';
|
||||
|
||||
vi.mock('@app/environment.js', async (importOriginal) => {
|
||||
const actual = (await importOriginal()) as any;
|
||||
return {
|
||||
...actual,
|
||||
get API_VERSION() {
|
||||
return API_VERSION_MOCK;
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
describe('VersionCommand', () => {
|
||||
let command: VersionCommand;
|
||||
let logService: LogService;
|
||||
let consoleLogSpy: MockInstance<typeof console.log>;
|
||||
|
||||
beforeEach(async () => {
|
||||
API_VERSION_MOCK = '4.18.2+build123'; // Reset to default before each test
|
||||
consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
VersionCommand,
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
info: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<VersionCommand>(VersionCommand);
|
||||
logService = module.get<LogService>(LogService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('run', () => {
|
||||
it('should output version with logger when no options provided', async () => {
|
||||
await command.run([]);
|
||||
|
||||
expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
|
||||
expect(consoleLogSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should output version with logger when json option is false', async () => {
|
||||
await command.run([], { json: false });
|
||||
|
||||
expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
|
||||
expect(consoleLogSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should output JSON when json option is true', async () => {
|
||||
await command.run([], { json: true });
|
||||
|
||||
expect(logService.info).not.toHaveBeenCalled();
|
||||
expect(consoleLogSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({
|
||||
version: '4.18.2',
|
||||
build: 'build123',
|
||||
combined: '4.18.2+build123',
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle version without build info', async () => {
|
||||
API_VERSION_MOCK = '4.18.2'; // Set version without build info
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
VersionCommand,
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
info: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
const commandWithoutBuild = module.get<VersionCommand>(VersionCommand);
|
||||
|
||||
await commandWithoutBuild.run([], { json: true });
|
||||
|
||||
expect(consoleLogSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({
|
||||
version: '4.18.2',
|
||||
build: undefined,
|
||||
combined: '4.18.2',
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseJson', () => {
|
||||
it('should return true', () => {
|
||||
expect(command.parseJson()).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -15,7 +15,7 @@ export type Scalars = {
|
||||
Int: { input: number; output: number; }
|
||||
Float: { input: number; output: number; }
|
||||
/** The `BigInt` scalar type represents non-fractional signed whole numeric values. */
|
||||
BigInt: { input: any; output: any; }
|
||||
BigInt: { input: number; output: number; }
|
||||
/** A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format. */
|
||||
DateTime: { input: string; output: string; }
|
||||
/** The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
|
||||
@@ -241,6 +241,8 @@ export type ArrayDisk = Node & {
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** Array slot number. Parity1 is always 0 and Parity2 is always 29. Array slots will be 1 - 28. Cache slots are 30 - 53. Flash is 54. */
|
||||
idx: Scalars['Int']['output'];
|
||||
/** Whether the disk is currently spinning */
|
||||
isSpinning?: Maybe<Scalars['Boolean']['output']>;
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
/** Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk. */
|
||||
numErrors?: Maybe<Scalars['BigInt']['output']>;
|
||||
@@ -448,20 +450,6 @@ export enum ConfigErrorState {
|
||||
WITHDRAWN = 'WITHDRAWN'
|
||||
}
|
||||
|
||||
export type ConfigFile = {
|
||||
__typename?: 'ConfigFile';
|
||||
content: Scalars['String']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
path: Scalars['String']['output'];
|
||||
/** Human-readable file size (e.g., "1.5 KB", "2.3 MB") */
|
||||
sizeReadable: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type ConfigFilesResponse = {
|
||||
__typename?: 'ConfigFilesResponse';
|
||||
files: Array<ConfigFile>;
|
||||
};
|
||||
|
||||
export type Connect = Node & {
|
||||
__typename?: 'Connect';
|
||||
/** The status of dynamic remote access */
|
||||
@@ -553,12 +541,16 @@ export type CoreVersions = {
|
||||
/** CPU load for a single core */
|
||||
export type CpuLoad = {
|
||||
__typename?: 'CpuLoad';
|
||||
/** The percentage of time the CPU spent running virtual machines (guest). */
|
||||
percentGuest: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU was idle. */
|
||||
percentIdle: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent servicing hardware interrupts. */
|
||||
percentIrq: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent on low-priority (niced) user space processes. */
|
||||
percentNice: Scalars['Float']['output'];
|
||||
/** The percentage of CPU time stolen by the hypervisor. */
|
||||
percentSteal: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent in kernel space. */
|
||||
percentSystem: Scalars['Float']['output'];
|
||||
/** The total CPU load on a single core, in percent. */
|
||||
@@ -617,6 +609,8 @@ export type Disk = Node & {
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** The interface type of the disk */
|
||||
interfaceType: DiskInterfaceType;
|
||||
/** Whether the disk is spinning or not */
|
||||
isSpinning: Scalars['Boolean']['output'];
|
||||
/** The model name of the disk */
|
||||
name: Scalars['String']['output'];
|
||||
/** The partitions on the disk */
|
||||
@@ -684,6 +678,7 @@ export enum DiskSmartStatus {
|
||||
|
||||
export type Docker = Node & {
|
||||
__typename?: 'Docker';
|
||||
containerUpdateStatuses: Array<ExplicitStatusItem>;
|
||||
containers: Array<DockerContainer>;
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
networks: Array<DockerNetwork>;
|
||||
@@ -709,13 +704,15 @@ export type DockerContainer = Node & {
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
image: Scalars['String']['output'];
|
||||
imageId: Scalars['String']['output'];
|
||||
isRebuildReady?: Maybe<Scalars['Boolean']['output']>;
|
||||
isUpdateAvailable?: Maybe<Scalars['Boolean']['output']>;
|
||||
labels?: Maybe<Scalars['JSON']['output']>;
|
||||
mounts?: Maybe<Array<Scalars['JSON']['output']>>;
|
||||
names: Array<Scalars['String']['output']>;
|
||||
networkSettings?: Maybe<Scalars['JSON']['output']>;
|
||||
ports: Array<ContainerPort>;
|
||||
/** Total size of all the files in the container */
|
||||
sizeRootFs?: Maybe<Scalars['Int']['output']>;
|
||||
/** Total size of all files in the container (in bytes) */
|
||||
sizeRootFs?: Maybe<Scalars['BigInt']['output']>;
|
||||
state: ContainerState;
|
||||
status: Scalars['String']['output'];
|
||||
};
|
||||
@@ -780,6 +777,12 @@ export type EnableDynamicRemoteAccessInput = {
|
||||
url: AccessUrlInput;
|
||||
};
|
||||
|
||||
export type ExplicitStatusItem = {
|
||||
__typename?: 'ExplicitStatusItem';
|
||||
name: Scalars['String']['output'];
|
||||
updateStatus: UpdateStatus;
|
||||
};
|
||||
|
||||
export type Flash = Node & {
|
||||
__typename?: 'Flash';
|
||||
guid: Scalars['String']['output'];
|
||||
@@ -1235,6 +1238,7 @@ export type Mutation = {
|
||||
rclone: RCloneMutations;
|
||||
/** Reads each notification to recompute & update the overview. */
|
||||
recalculateOverview: NotificationOverview;
|
||||
refreshDockerDigests: Scalars['Boolean']['output'];
|
||||
/** Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required. */
|
||||
removePlugin: Scalars['Boolean']['output'];
|
||||
setDockerFolderChildren: ResolvedOrganizerV1;
|
||||
@@ -1645,7 +1649,6 @@ export type PublicPartnerInfo = {
|
||||
|
||||
export type Query = {
|
||||
__typename?: 'Query';
|
||||
allConfigFiles: ConfigFilesResponse;
|
||||
apiKey?: Maybe<ApiKey>;
|
||||
/** All possible permissions for API keys */
|
||||
apiKeyPossiblePermissions: Array<Permission>;
|
||||
@@ -1655,7 +1658,6 @@ export type Query = {
|
||||
array: UnraidArray;
|
||||
cloud: Cloud;
|
||||
config: Config;
|
||||
configFile?: Maybe<ConfigFile>;
|
||||
connect: Connect;
|
||||
customization?: Maybe<Customization>;
|
||||
disk: Disk;
|
||||
@@ -1719,11 +1721,6 @@ export type QueryApiKeyArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type QueryConfigFileArgs = {
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type QueryDiskArgs = {
|
||||
id: Scalars['PrefixedID']['input'];
|
||||
};
|
||||
@@ -2277,6 +2274,14 @@ export type UpdateSettingsResponse = {
|
||||
warnings?: Maybe<Array<Scalars['String']['output']>>;
|
||||
};
|
||||
|
||||
/** Update status of a container. */
|
||||
export enum UpdateStatus {
|
||||
REBUILD_READY = 'REBUILD_READY',
|
||||
UNKNOWN = 'UNKNOWN',
|
||||
UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
|
||||
UP_TO_DATE = 'UP_TO_DATE'
|
||||
}
|
||||
|
||||
export type Uptime = {
|
||||
__typename?: 'Uptime';
|
||||
timestamp?: Maybe<Scalars['String']['output']>;
|
||||
|
||||
76
api/src/unraid-api/cli/pm2.service.spec.ts
Normal file
76
api/src/unraid-api/cli/pm2.service.spec.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import * as fs from 'node:fs/promises';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
|
||||
|
||||
vi.mock('node:fs/promises');
|
||||
vi.mock('execa');
|
||||
vi.mock('@app/core/utils/files/file-exists.js', () => ({
|
||||
fileExists: vi.fn().mockResolvedValue(false),
|
||||
}));
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
PATHS_LOGS_DIR: '/var/log/unraid-api',
|
||||
PM2_HOME: '/var/log/.pm2',
|
||||
PM2_PATH: '/path/to/pm2',
|
||||
ECOSYSTEM_PATH: '/path/to/ecosystem.config.json',
|
||||
SUPPRESS_LOGS: false,
|
||||
LOG_LEVEL: 'info',
|
||||
}));
|
||||
|
||||
describe('PM2Service', () => {
|
||||
let pm2Service: PM2Service;
|
||||
let logService: LogService;
|
||||
const mockMkdir = vi.mocked(fs.mkdir);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
logService = {
|
||||
trace: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
log: vi.fn(),
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
} as unknown as LogService;
|
||||
pm2Service = new PM2Service(logService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('ensurePm2Dependencies', () => {
|
||||
it('should create logs directory and log that PM2 will handle its own directory', async () => {
|
||||
mockMkdir.mockResolvedValue(undefined);
|
||||
|
||||
await pm2Service.ensurePm2Dependencies();
|
||||
|
||||
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
|
||||
expect(mockMkdir).toHaveBeenCalledTimes(1); // Only logs directory, not PM2_HOME
|
||||
expect(logService.trace).toHaveBeenCalledWith(
|
||||
'PM2_HOME will be created at /var/log/.pm2 when PM2 daemon starts'
|
||||
);
|
||||
});
|
||||
|
||||
it('should log error but not throw when logs directory creation fails', async () => {
|
||||
mockMkdir.mockRejectedValue(new Error('Disk full'));
|
||||
|
||||
await expect(pm2Service.ensurePm2Dependencies()).resolves.not.toThrow();
|
||||
|
||||
expect(logService.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to fully ensure PM2 dependencies: Disk full')
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle mkdir with recursive flag for nested logs path', async () => {
|
||||
mockMkdir.mockResolvedValue(undefined);
|
||||
|
||||
await pm2Service.ensurePm2Dependencies();
|
||||
|
||||
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
|
||||
expect(mockMkdir).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -42,8 +42,22 @@ export class PM2Service {
|
||||
|
||||
async run(context: CmdContext, ...args: string[]) {
|
||||
const { tag, raw, ...execOptions } = context;
|
||||
execOptions.extendEnv ??= false;
|
||||
// Default to true to match execa's default behavior
|
||||
execOptions.extendEnv ??= true;
|
||||
execOptions.shell ??= 'bash';
|
||||
|
||||
// Ensure /usr/local/bin is in PATH for Node.js
|
||||
const currentPath = execOptions.env?.PATH || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
|
||||
const needsPathUpdate = !currentPath.includes('/usr/local/bin');
|
||||
const finalPath = needsPathUpdate ? `/usr/local/bin:${currentPath}` : currentPath;
|
||||
|
||||
// Always ensure PM2_HOME is set in the environment for every PM2 command
|
||||
execOptions.env = {
|
||||
...execOptions.env,
|
||||
PM2_HOME,
|
||||
...(needsPathUpdate && { PATH: finalPath }),
|
||||
};
|
||||
|
||||
const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
|
||||
if (raw) {
|
||||
return runCommand();
|
||||
@@ -100,8 +114,20 @@ export class PM2Service {
|
||||
|
||||
/**
|
||||
* Ensures that the dependencies necessary for PM2 to start and operate are present.
|
||||
* Creates PM2_HOME directory with proper permissions if it doesn't exist.
|
||||
*/
|
||||
async ensurePm2Dependencies() {
|
||||
await mkdir(PATHS_LOGS_DIR, { recursive: true });
|
||||
try {
|
||||
// Create logs directory
|
||||
await mkdir(PATHS_LOGS_DIR, { recursive: true });
|
||||
|
||||
// PM2 automatically creates and manages its home directory when the daemon starts
|
||||
this.logger.trace(`PM2_HOME will be created at ${PM2_HOME} when PM2 daemon starts`);
|
||||
} catch (error) {
|
||||
// Log error but don't throw - let PM2 fail with its own error messages if the setup is incomplete
|
||||
this.logger.error(
|
||||
`Failed to fully ensure PM2 dependencies: ${error instanceof Error ? error.message : error}. PM2 may encounter issues during operation.`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,8 @@ export class RestartCommand extends CommandRunner {
|
||||
{ tag: 'PM2 Restart', raw: true, extendEnv: true, env },
|
||||
'restart',
|
||||
ECOSYSTEM_PATH,
|
||||
'--update-env'
|
||||
'--update-env',
|
||||
'--mini-list'
|
||||
);
|
||||
|
||||
if (stderr) {
|
||||
|
||||
@@ -33,7 +33,8 @@ export class StartCommand extends CommandRunner {
|
||||
{ tag: 'PM2 Start', raw: true, extendEnv: true, env },
|
||||
'start',
|
||||
ECOSYSTEM_PATH,
|
||||
'--update-env'
|
||||
'--update-env',
|
||||
'--mini-list'
|
||||
);
|
||||
if (stdout) {
|
||||
this.logger.log(stdout.toString());
|
||||
|
||||
@@ -8,6 +8,11 @@ export class StatusCommand extends CommandRunner {
|
||||
super();
|
||||
}
|
||||
async run(): Promise<void> {
|
||||
await this.pm2.run({ tag: 'PM2 Status', stdio: 'inherit', raw: true }, 'status', 'unraid-api');
|
||||
await this.pm2.run(
|
||||
{ tag: 'PM2 Status', stdio: 'inherit', raw: true },
|
||||
'status',
|
||||
'unraid-api',
|
||||
'--mini-list'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,7 +33,8 @@ export class StopCommand extends CommandRunner {
|
||||
{ tag: 'PM2 Delete', stdio: 'inherit' },
|
||||
'delete',
|
||||
ECOSYSTEM_PATH,
|
||||
'--no-autorestart'
|
||||
'--no-autorestart',
|
||||
'--mini-list'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,37 @@
|
||||
import { Command, CommandRunner } from 'nest-commander';
|
||||
import { Command, CommandRunner, Option } from 'nest-commander';
|
||||
|
||||
import { API_VERSION } from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
@Command({ name: 'version' })
|
||||
interface VersionOptions {
|
||||
json?: boolean;
|
||||
}
|
||||
|
||||
@Command({ name: 'version', description: 'Display API version information' })
|
||||
export class VersionCommand extends CommandRunner {
|
||||
constructor(private readonly logger: LogService) {
|
||||
super();
|
||||
}
|
||||
async run(): Promise<void> {
|
||||
this.logger.info(`Unraid API v${API_VERSION}`);
|
||||
|
||||
@Option({
|
||||
flags: '-j, --json',
|
||||
description: 'Output version information as JSON',
|
||||
})
|
||||
parseJson(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
async run(passedParam: string[], options?: VersionOptions): Promise<void> {
|
||||
if (options?.json) {
|
||||
const [baseVersion, buildInfo] = API_VERSION.split('+');
|
||||
const versionInfo = {
|
||||
version: baseVersion || API_VERSION,
|
||||
build: buildInfo || undefined,
|
||||
combined: API_VERSION,
|
||||
};
|
||||
console.log(JSON.stringify(versionInfo));
|
||||
} else {
|
||||
this.logger.info(`Unraid API v${API_VERSION}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
|
||||
import { JobModule } from '@app/unraid-api/cron/job.module.js';
|
||||
import { LogRotateService } from '@app/unraid-api/cron/log-rotate.service.js';
|
||||
import { WriteFlashFileService } from '@app/unraid-api/cron/write-flash-file.service.js';
|
||||
|
||||
@Module({
|
||||
imports: [],
|
||||
imports: [JobModule],
|
||||
providers: [WriteFlashFileService, LogRotateService],
|
||||
})
|
||||
export class CronModule {}
|
||||
|
||||
13
api/src/unraid-api/cron/job.module.ts
Normal file
13
api/src/unraid-api/cron/job.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
|
||||
/**
|
||||
* Sets up common dependencies for initializing jobs (e.g. scheduler registry, cron jobs).
|
||||
*
|
||||
* Simplifies testing setup & application dependency tree by ensuring `forRoot` is called only once.
|
||||
*/
|
||||
@Module({
|
||||
imports: [ScheduleModule.forRoot()],
|
||||
exports: [ScheduleModule],
|
||||
})
|
||||
export class JobModule {}
|
||||
172
api/src/unraid-api/decorators/omit-if.decorator.spec.ts
Normal file
172
api/src/unraid-api/decorators/omit-if.decorator.spec.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
import { Reflector } from '@nestjs/core';
|
||||
import { Field, Mutation, ObjectType, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OMIT_IF_METADATA_KEY, OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';
|
||||
|
||||
describe('OmitIf Decorator', () => {
|
||||
let reflector: Reflector;
|
||||
|
||||
beforeEach(() => {
|
||||
reflector = new Reflector();
|
||||
});
|
||||
|
||||
describe('OmitIf', () => {
|
||||
it('should set metadata when condition is true', () => {
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBe(true);
|
||||
});
|
||||
|
||||
it('should not set metadata when condition is false', () => {
|
||||
class TestResolver {
|
||||
@OmitIf(false)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should evaluate function conditions', () => {
|
||||
const mockCondition = vi.fn(() => true);
|
||||
|
||||
class TestResolver {
|
||||
@OmitIf(mockCondition)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
expect(mockCondition).toHaveBeenCalledOnce();
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBe(true);
|
||||
});
|
||||
|
||||
it('should evaluate function conditions that return false', () => {
|
||||
const mockCondition = vi.fn(() => false);
|
||||
|
||||
class TestResolver {
|
||||
@OmitIf(mockCondition)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
expect(mockCondition).toHaveBeenCalledOnce();
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should work with environment variables', () => {
|
||||
const originalEnv = process.env.NODE_ENV;
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
class TestResolver {
|
||||
@OmitIf(process.env.NODE_ENV === 'production')
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBe(true);
|
||||
|
||||
process.env.NODE_ENV = originalEnv;
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration with NestJS GraphQL decorators', () => {
|
||||
it('should work with @Query decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
@Query(() => String)
|
||||
omittedQuery() {
|
||||
return 'test';
|
||||
}
|
||||
|
||||
@OmitIf(false)
|
||||
@Query(() => String)
|
||||
includedQuery() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedQuery);
|
||||
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedQuery);
|
||||
|
||||
expect(omittedMetadata).toBe(true);
|
||||
expect(includedMetadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should work with @Mutation decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
@Mutation(() => String)
|
||||
omittedMutation() {
|
||||
return 'test';
|
||||
}
|
||||
|
||||
@OmitIf(false)
|
||||
@Mutation(() => String)
|
||||
includedMutation() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedMutation);
|
||||
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedMutation);
|
||||
|
||||
expect(omittedMetadata).toBe(true);
|
||||
expect(includedMetadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should work with @ResolveField decorator', () => {
|
||||
@ObjectType()
|
||||
class TestType {
|
||||
@Field()
|
||||
id: string = '';
|
||||
}
|
||||
|
||||
@Resolver(() => TestType)
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
@ResolveField(() => String)
|
||||
omittedField() {
|
||||
return 'test';
|
||||
}
|
||||
|
||||
@OmitIf(false)
|
||||
@ResolveField(() => String)
|
||||
includedField() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedField);
|
||||
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedField);
|
||||
|
||||
expect(omittedMetadata).toBe(true);
|
||||
expect(includedMetadata).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
80
api/src/unraid-api/decorators/omit-if.decorator.ts
Normal file
80
api/src/unraid-api/decorators/omit-if.decorator.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { SetMetadata } from '@nestjs/common';
|
||||
import { Extensions } from '@nestjs/graphql';
|
||||
|
||||
import { MapperKind, mapSchema } from '@graphql-tools/utils';
|
||||
import { GraphQLFieldConfig, GraphQLSchema } from 'graphql';
|
||||
|
||||
export const OMIT_IF_METADATA_KEY = 'omitIf';
|
||||
|
||||
/**
|
||||
* Decorator that conditionally omits a GraphQL field/query/mutation based on a condition.
|
||||
* The field will only be omitted from the schema when the condition evaluates to true.
|
||||
*
|
||||
* @param condition - If the condition evaluates to true, the field will be omitted from the schema
|
||||
* @returns A decorator that wraps the target field/query/mutation
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* @OmitIf(process.env.NODE_ENV === 'production')
|
||||
* @Query(() => String)
|
||||
* async debugQuery() {
|
||||
* return 'This query is omitted in production';
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function OmitIf(condition: boolean | (() => boolean)): MethodDecorator & PropertyDecorator {
|
||||
const shouldOmit = typeof condition === 'function' ? condition() : condition;
|
||||
|
||||
return (target: object, propertyKey?: string | symbol, descriptor?: PropertyDescriptor) => {
|
||||
if (shouldOmit) {
|
||||
SetMetadata(OMIT_IF_METADATA_KEY, true)(
|
||||
target,
|
||||
propertyKey as string,
|
||||
descriptor as PropertyDescriptor
|
||||
);
|
||||
Extensions({ omitIf: true })(
|
||||
target,
|
||||
propertyKey as string,
|
||||
descriptor as PropertyDescriptor
|
||||
);
|
||||
}
|
||||
|
||||
return descriptor;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Schema transformer that omits fields/queries/mutations based on the OmitIf decorator.
|
||||
* @param schema - The GraphQL schema to transform
|
||||
* @returns The transformed GraphQL schema
|
||||
*/
|
||||
export function omitIfSchemaTransformer(schema: GraphQLSchema): GraphQLSchema {
|
||||
return mapSchema(schema, {
|
||||
[MapperKind.OBJECT_FIELD]: (
|
||||
fieldConfig: GraphQLFieldConfig<any, any>,
|
||||
fieldName: string,
|
||||
typeName: string
|
||||
) => {
|
||||
const extensions = fieldConfig.extensions || {};
|
||||
|
||||
if (extensions.omitIf === true) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return fieldConfig;
|
||||
},
|
||||
[MapperKind.ROOT_FIELD]: (
|
||||
fieldConfig: GraphQLFieldConfig<any, any>,
|
||||
fieldName: string,
|
||||
typeName: string
|
||||
) => {
|
||||
const extensions = fieldConfig.extensions || {};
|
||||
|
||||
if (extensions.omitIf === true) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return fieldConfig;
|
||||
},
|
||||
});
|
||||
}
|
||||
317
api/src/unraid-api/decorators/use-feature-flag.decorator.spec.ts
Normal file
317
api/src/unraid-api/decorators/use-feature-flag.decorator.spec.ts
Normal file
@@ -0,0 +1,317 @@
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-nocheck
|
||||
// fixme: types don't sync with mocks, and there's no override to simplify testing.
|
||||
|
||||
import { Reflector } from '@nestjs/core';
|
||||
import { Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OMIT_IF_METADATA_KEY } from '@app/unraid-api/decorators/omit-if.decorator.js';
|
||||
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
|
||||
|
||||
// Mock the FeatureFlags
|
||||
vi.mock('@app/consts.js', () => ({
|
||||
FeatureFlags: Object.freeze({
|
||||
ENABLE_NEXT_DOCKER_RELEASE: false,
|
||||
ENABLE_EXPERIMENTAL_FEATURE: true,
|
||||
ENABLE_DEBUG_MODE: false,
|
||||
ENABLE_BETA_FEATURES: true,
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('UseFeatureFlag Decorator', () => {
|
||||
let reflector: Reflector;
|
||||
|
||||
beforeEach(() => {
|
||||
reflector = new Reflector();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('Basic functionality', () => {
|
||||
it('should omit field when feature flag is false', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
testQuery() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
|
||||
expect(metadata).toBe(true); // Should be omitted because flag is false
|
||||
});
|
||||
|
||||
it('should include field when feature flag is true', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Query(() => String)
|
||||
testQuery() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
|
||||
expect(metadata).toBeUndefined(); // Should not be omitted because flag is true
|
||||
});
|
||||
});
|
||||
|
||||
describe('With different decorator types', () => {
|
||||
it('should work with @Query decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_DEBUG_MODE')
|
||||
@Query(() => String)
|
||||
debugQuery() {
|
||||
return 'debug';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@Query(() => String)
|
||||
betaQuery() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery);
|
||||
const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery);
|
||||
|
||||
expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
|
||||
expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
|
||||
});
|
||||
|
||||
it('should work with @Mutation decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Mutation(() => String)
|
||||
dockerMutation() {
|
||||
return 'docker';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Mutation(() => String)
|
||||
experimentalMutation() {
|
||||
return 'experimental';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const dockerMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.dockerMutation);
|
||||
const experimentalMetadata = reflector.get(
|
||||
OMIT_IF_METADATA_KEY,
|
||||
instance.experimentalMutation
|
||||
);
|
||||
|
||||
expect(dockerMetadata).toBe(true); // ENABLE_NEXT_DOCKER_RELEASE is false
|
||||
expect(experimentalMetadata).toBeUndefined(); // ENABLE_EXPERIMENTAL_FEATURE is true
|
||||
});
|
||||
|
||||
it('should work with @ResolveField decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_DEBUG_MODE')
|
||||
@ResolveField(() => String)
|
||||
debugField() {
|
||||
return 'debug';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@ResolveField(() => String)
|
||||
betaField() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugField);
|
||||
const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaField);
|
||||
|
||||
expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
|
||||
expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multiple decorators on same class', () => {
|
||||
it('should handle multiple feature flags independently', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
dockerQuery() {
|
||||
return 'docker';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Query(() => String)
|
||||
experimentalQuery() {
|
||||
return 'experimental';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_DEBUG_MODE')
|
||||
@Query(() => String)
|
||||
debugQuery() {
|
||||
return 'debug';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@Query(() => String)
|
||||
betaQuery() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery)).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Type safety', () => {
|
||||
it('should only accept valid feature flag keys', () => {
|
||||
// This test verifies TypeScript compile-time type safety
|
||||
// The following would cause a TypeScript error if uncommented:
|
||||
// @UseFeatureFlag('INVALID_FLAG')
|
||||
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
validQuery() {
|
||||
return 'valid';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
expect(instance.validQuery).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration scenarios', () => {
|
||||
it('should work correctly with other decorators', () => {
|
||||
const customDecorator = (
|
||||
target: any,
|
||||
propertyKey: string | symbol,
|
||||
descriptor: PropertyDescriptor
|
||||
) => {
|
||||
Reflect.defineMetadata('custom', true, target, propertyKey);
|
||||
return descriptor;
|
||||
};
|
||||
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@customDecorator
|
||||
@Query(() => String)
|
||||
multiDecoratorQuery() {
|
||||
return 'multi';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omitMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.multiDecoratorQuery);
|
||||
const customMetadata = Reflect.getMetadata('custom', instance, 'multiDecoratorQuery');
|
||||
|
||||
expect(omitMetadata).toBe(true);
|
||||
expect(customMetadata).toBe(true);
|
||||
});
|
||||
|
||||
it('should maintain correct decorator order', () => {
|
||||
const orderTracker: string[] = [];
|
||||
|
||||
const trackingDecorator = (name: string) => {
|
||||
return (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor) => {
|
||||
orderTracker.push(name);
|
||||
return descriptor;
|
||||
};
|
||||
};
|
||||
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@trackingDecorator('first')
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@trackingDecorator('last')
|
||||
@Query(() => String)
|
||||
orderedQuery() {
|
||||
return 'ordered';
|
||||
}
|
||||
}
|
||||
|
||||
// Decorators are applied bottom-up
|
||||
expect(orderTracker).toEqual(['last', 'first']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Real-world usage patterns', () => {
|
||||
it('should work with Docker resolver pattern', () => {
|
||||
@Resolver()
|
||||
class DockerResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Mutation(() => String)
|
||||
async createDockerFolder(name: string) {
|
||||
return `Created folder: ${name}`;
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Mutation(() => String)
|
||||
async deleteDockerEntries(entryIds: string[]) {
|
||||
return `Deleted entries: ${entryIds.join(', ')}`;
|
||||
}
|
||||
|
||||
@Query(() => String)
|
||||
async getDockerInfo() {
|
||||
return 'Docker info';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new DockerResolver();
|
||||
|
||||
// Feature flag is false, so these should be omitted
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.createDockerFolder)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.deleteDockerEntries)).toBe(true);
|
||||
|
||||
// No feature flag, so this should not be omitted
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.getDockerInfo)).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle mixed feature flags in same resolver', () => {
|
||||
@Resolver()
|
||||
class MixedResolver {
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Query(() => String)
|
||||
experimentalQuery() {
|
||||
return 'experimental';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
dockerQuery() {
|
||||
return 'docker';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@Mutation(() => String)
|
||||
betaMutation() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new MixedResolver();
|
||||
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaMutation)).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
22
api/src/unraid-api/decorators/use-feature-flag.decorator.ts
Normal file
22
api/src/unraid-api/decorators/use-feature-flag.decorator.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { FeatureFlags } from '@app/consts.js';
|
||||
import { OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';
|
||||
|
||||
/**
|
||||
* Decorator that conditionally includes a GraphQL field/query/mutation based on a feature flag.
|
||||
* The field will only be included in the schema when the feature flag is enabled.
|
||||
*
|
||||
* @param flagKey - The key of the feature flag in FeatureFlags
|
||||
* @returns A decorator that wraps OmitIf
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
* @Mutation(() => String)
|
||||
* async experimentalMutation() {
|
||||
* return 'This mutation is only available when ENABLE_NEXT_DOCKER_RELEASE is true';
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function UseFeatureFlag(flagKey: keyof typeof FeatureFlags): MethodDecorator & PropertyDecorator {
|
||||
return OmitIf(!FeatureFlags[flagKey]);
|
||||
}
|
||||
@@ -12,6 +12,7 @@ import { NoUnusedVariablesRule } from 'graphql';
|
||||
|
||||
import { ENVIRONMENT } from '@app/environment.js';
|
||||
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { omitIfSchemaTransformer } from '@app/unraid-api/decorators/omit-if.decorator.js';
|
||||
|
||||
// Import enum registrations to ensure they're registered with GraphQL
|
||||
import '@app/unraid-api/graph/auth/auth-action.enum.js';
|
||||
@@ -64,7 +65,12 @@ import { PluginModule } from '@app/unraid-api/plugin/plugin.module.js';
|
||||
},
|
||||
// Only add transform when not in test environment to avoid GraphQL version conflicts
|
||||
transformSchema:
|
||||
process.env.NODE_ENV === 'test' ? undefined : usePermissionsSchemaTransformer,
|
||||
process.env.NODE_ENV === 'test'
|
||||
? undefined
|
||||
: (schema) => {
|
||||
const schemaWithPermissions = usePermissionsSchemaTransformer(schema);
|
||||
return omitIfSchemaTransformer(schemaWithPermissions);
|
||||
},
|
||||
validationRules: [NoUnusedVariablesRule],
|
||||
};
|
||||
},
|
||||
|
||||
@@ -126,6 +126,9 @@ export class ArrayDisk extends Node {
|
||||
|
||||
@Field(() => ArrayDiskFsColor, { nullable: true })
|
||||
color?: ArrayDiskFsColor | null;
|
||||
|
||||
@Field(() => Boolean, { nullable: true, description: 'Whether the disk is currently spinning' })
|
||||
isSpinning?: boolean | null;
|
||||
}
|
||||
|
||||
@ObjectType({
|
||||
|
||||
@@ -3,7 +3,15 @@ import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
|
||||
import { Node } from '@unraid/shared/graphql.model.js';
|
||||
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
|
||||
import { Type } from 'class-transformer';
|
||||
import { IsArray, IsEnum, IsNumber, IsOptional, IsString, ValidateNested } from 'class-validator';
|
||||
import {
|
||||
IsArray,
|
||||
IsBoolean,
|
||||
IsEnum,
|
||||
IsNumber,
|
||||
IsOptional,
|
||||
IsString,
|
||||
ValidateNested,
|
||||
} from 'class-validator';
|
||||
|
||||
export enum DiskFsType {
|
||||
XFS = 'XFS',
|
||||
@@ -136,4 +144,8 @@ export class Disk extends Node {
|
||||
@ValidateNested({ each: true })
|
||||
@Type(() => DiskPartition)
|
||||
partitions!: DiskPartition[];
|
||||
|
||||
@Field(() => Boolean, { description: 'Whether the disk is spinning or not' })
|
||||
@IsBoolean()
|
||||
isSpinning!: boolean;
|
||||
}
|
||||
|
||||
@@ -66,6 +66,7 @@ describe('DisksResolver', () => {
|
||||
smartStatus: DiskSmartStatus.OK,
|
||||
temperature: -1,
|
||||
partitions: [],
|
||||
isSpinning: false,
|
||||
},
|
||||
];
|
||||
mockDisksService.getDisks.mockResolvedValue(mockResult);
|
||||
@@ -92,6 +93,7 @@ describe('DisksResolver', () => {
|
||||
const mockDisk: Disk = {
|
||||
id: 'SERIAL123',
|
||||
device: '/dev/sda',
|
||||
isSpinning: false,
|
||||
type: 'SSD',
|
||||
name: 'Samsung SSD 860 EVO 1TB',
|
||||
vendor: 'Samsung',
|
||||
|
||||
@@ -33,4 +33,9 @@ export class DisksResolver {
|
||||
public async temperature(@Parent() disk: Disk) {
|
||||
return this.disksService.getTemperature(disk.device);
|
||||
}
|
||||
|
||||
@ResolveField(() => Boolean)
|
||||
public async isSpinning(@Parent() disk: Disk) {
|
||||
return disk.isSpinning;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import type { Systeminformation } from 'systeminformation';
|
||||
import { execa } from 'execa';
|
||||
import { blockDevices, diskLayout } from 'systeminformation';
|
||||
// Vitest imports
|
||||
import { beforeEach, describe, expect, it, Mock, MockedFunction, vi } from 'vitest';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import {
|
||||
ArrayDisk,
|
||||
ArrayDiskStatus,
|
||||
ArrayDiskType,
|
||||
} from '@app/unraid-api/graph/resolvers/array/array.model.js';
|
||||
import {
|
||||
Disk,
|
||||
DiskFsType,
|
||||
@@ -33,6 +39,86 @@ const mockBatchProcess = batchProcess as any;
|
||||
|
||||
describe('DisksService', () => {
|
||||
let service: DisksService;
|
||||
let configService: ConfigService;
|
||||
|
||||
// Mock ArrayDisk data from state
|
||||
const mockArrayDisks: ArrayDisk[] = [
|
||||
{
|
||||
id: 'S4ENNF0N123456',
|
||||
device: 'sda',
|
||||
name: 'cache',
|
||||
size: 512110190592,
|
||||
idx: 30,
|
||||
type: ArrayDiskType.CACHE,
|
||||
status: ArrayDiskStatus.DISK_OK,
|
||||
isSpinning: null, // NVMe/SSD doesn't spin
|
||||
rotational: false,
|
||||
exportable: false,
|
||||
numErrors: 0,
|
||||
numReads: 1000,
|
||||
numWrites: 2000,
|
||||
temp: 42,
|
||||
comment: 'NVMe Cache',
|
||||
format: 'GPT: 4KiB-aligned',
|
||||
fsType: 'btrfs',
|
||||
transport: 'nvme',
|
||||
warning: null,
|
||||
critical: null,
|
||||
fsFree: null,
|
||||
fsSize: null,
|
||||
fsUsed: null,
|
||||
},
|
||||
{
|
||||
id: 'WD-WCC7K7YL9876',
|
||||
device: 'sdb',
|
||||
name: 'disk1',
|
||||
size: 4000787030016,
|
||||
idx: 1,
|
||||
type: ArrayDiskType.DATA,
|
||||
status: ArrayDiskStatus.DISK_OK,
|
||||
isSpinning: true, // Currently spinning
|
||||
rotational: true,
|
||||
exportable: false,
|
||||
numErrors: 0,
|
||||
numReads: 5000,
|
||||
numWrites: 3000,
|
||||
temp: 35,
|
||||
comment: 'Data Disk 1',
|
||||
format: 'GPT: 4KiB-aligned',
|
||||
fsType: 'xfs',
|
||||
transport: 'sata',
|
||||
warning: null,
|
||||
critical: null,
|
||||
fsFree: 1000000000,
|
||||
fsSize: 4000000000,
|
||||
fsUsed: 3000000000,
|
||||
},
|
||||
{
|
||||
id: 'WD-SPUNDOWN123',
|
||||
device: 'sdd',
|
||||
name: 'disk2',
|
||||
size: 4000787030016,
|
||||
idx: 2,
|
||||
type: ArrayDiskType.DATA,
|
||||
status: ArrayDiskStatus.DISK_OK,
|
||||
isSpinning: false, // Spun down
|
||||
rotational: true,
|
||||
exportable: false,
|
||||
numErrors: 0,
|
||||
numReads: 3000,
|
||||
numWrites: 1000,
|
||||
temp: 30,
|
||||
comment: 'Data Disk 2 (spun down)',
|
||||
format: 'GPT: 4KiB-aligned',
|
||||
fsType: 'xfs',
|
||||
transport: 'sata',
|
||||
warning: null,
|
||||
critical: null,
|
||||
fsFree: 2000000000,
|
||||
fsSize: 4000000000,
|
||||
fsUsed: 2000000000,
|
||||
},
|
||||
];
|
||||
|
||||
const mockDiskLayoutData: Systeminformation.DiskLayoutData[] = [
|
||||
{
|
||||
@@ -92,6 +178,25 @@ describe('DisksService', () => {
|
||||
smartStatus: 'unknown', // Simulate unknown status
|
||||
temperature: null,
|
||||
},
|
||||
{
|
||||
device: '/dev/sdd',
|
||||
type: 'HD',
|
||||
name: 'WD Spun Down',
|
||||
vendor: 'Western Digital',
|
||||
size: 4000787030016,
|
||||
bytesPerSector: 512,
|
||||
totalCylinders: 486401,
|
||||
totalHeads: 255,
|
||||
totalSectors: 7814037168,
|
||||
totalTracks: 124032255,
|
||||
tracksPerCylinder: 255,
|
||||
sectorsPerTrack: 63,
|
||||
firmwareRevision: '82.00A82',
|
||||
serialNum: 'WD-SPUNDOWN123',
|
||||
interfaceType: 'SATA',
|
||||
smartStatus: 'Ok',
|
||||
temperature: null,
|
||||
},
|
||||
];
|
||||
|
||||
const mockBlockDeviceData: Systeminformation.BlockDevicesData[] = [
|
||||
@@ -174,17 +279,50 @@ describe('DisksService', () => {
|
||||
protocol: 'SATA', // Assume SATA even if interface type unknown for disk
|
||||
identifier: '/dev/sdc1',
|
||||
},
|
||||
// Partition for sdd
|
||||
{
|
||||
name: 'sdd1',
|
||||
type: 'part',
|
||||
fsType: 'xfs',
|
||||
mount: '/mnt/disk2',
|
||||
size: 4000787030016,
|
||||
physical: 'HDD',
|
||||
uuid: 'UUID-SDD1',
|
||||
label: 'Data2',
|
||||
model: 'WD Spun Down',
|
||||
serial: 'WD-SPUNDOWN123',
|
||||
removable: false,
|
||||
protocol: 'SATA',
|
||||
identifier: '/dev/sdd1',
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(async () => {
|
||||
// Reset mocks before each test using vi
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Create mock ConfigService
|
||||
const mockConfigService = {
|
||||
get: vi.fn().mockImplementation((key: string, defaultValue?: any) => {
|
||||
if (key === 'store.emhttp.disks') {
|
||||
return mockArrayDisks;
|
||||
}
|
||||
return defaultValue;
|
||||
}),
|
||||
};
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [DisksService],
|
||||
providers: [
|
||||
DisksService,
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: mockConfigService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<DisksService>(DisksService);
|
||||
configService = module.get<ConfigService>(ConfigService);
|
||||
|
||||
// Setup default mock implementations
|
||||
mockDiskLayout.mockResolvedValue(mockDiskLayoutData);
|
||||
@@ -207,46 +345,112 @@ describe('DisksService', () => {
|
||||
// --- Test getDisks ---
|
||||
|
||||
describe('getDisks', () => {
|
||||
it('should return disks without temperature', async () => {
|
||||
it('should return disks with spinning state from store', async () => {
|
||||
const disks = await service.getDisks();
|
||||
|
||||
expect(mockDiskLayout).toHaveBeenCalledTimes(1);
|
||||
expect(mockBlockDevices).toHaveBeenCalledTimes(1);
|
||||
expect(mockExeca).not.toHaveBeenCalled(); // Temperature should not be fetched
|
||||
expect(mockBatchProcess).toHaveBeenCalledTimes(1); // Still uses batchProcess for parsing
|
||||
expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
|
||||
expect(mockBatchProcess).toHaveBeenCalledTimes(1);
|
||||
|
||||
expect(disks).toHaveLength(mockDiskLayoutData.length);
|
||||
expect(disks[0]).toMatchObject({
|
||||
id: 'S4ENNF0N123456',
|
||||
device: '/dev/sda',
|
||||
type: 'HD',
|
||||
name: 'SAMSUNG MZVLB512HBJQ-000L7',
|
||||
vendor: 'Samsung',
|
||||
size: 512110190592,
|
||||
interfaceType: DiskInterfaceType.PCIE,
|
||||
smartStatus: DiskSmartStatus.OK,
|
||||
temperature: null, // Temperature is now null by default
|
||||
partitions: [
|
||||
{ name: 'sda1', fsType: DiskFsType.VFAT, size: 536870912 },
|
||||
{ name: 'sda2', fsType: DiskFsType.EXT4, size: 511560000000 },
|
||||
],
|
||||
|
||||
// Check NVMe disk with null spinning state
|
||||
const nvmeDisk = disks.find((d) => d.id === 'S4ENNF0N123456');
|
||||
expect(nvmeDisk).toBeDefined();
|
||||
expect(nvmeDisk?.isSpinning).toBe(false); // null from state defaults to false
|
||||
expect(nvmeDisk?.interfaceType).toBe(DiskInterfaceType.PCIE);
|
||||
expect(nvmeDisk?.smartStatus).toBe(DiskSmartStatus.OK);
|
||||
expect(nvmeDisk?.partitions).toHaveLength(2);
|
||||
|
||||
// Check spinning disk
|
||||
const spinningDisk = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
|
||||
expect(spinningDisk).toBeDefined();
|
||||
expect(spinningDisk?.isSpinning).toBe(true); // From state
|
||||
expect(spinningDisk?.interfaceType).toBe(DiskInterfaceType.SATA);
|
||||
|
||||
// Check spun down disk
|
||||
const spunDownDisk = disks.find((d) => d.id === 'WD-SPUNDOWN123');
|
||||
expect(spunDownDisk).toBeDefined();
|
||||
expect(spunDownDisk?.isSpinning).toBe(false); // From state
|
||||
|
||||
// Check disk not in state (defaults to not spinning)
|
||||
const unknownDisk = disks.find((d) => d.id === 'OTHER-SERIAL-123');
|
||||
expect(unknownDisk).toBeDefined();
|
||||
expect(unknownDisk?.isSpinning).toBe(false); // Not in state, defaults to false
|
||||
expect(unknownDisk?.interfaceType).toBe(DiskInterfaceType.UNKNOWN);
|
||||
expect(unknownDisk?.smartStatus).toBe(DiskSmartStatus.UNKNOWN);
|
||||
});
|
||||
|
||||
it('should handle empty state gracefully', async () => {
|
||||
vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
|
||||
if (key === 'store.emhttp.disks') {
|
||||
return [];
|
||||
}
|
||||
return defaultValue;
|
||||
});
|
||||
expect(disks[1]).toMatchObject({
|
||||
id: 'WD-WCC7K7YL9876',
|
||||
device: '/dev/sdb',
|
||||
interfaceType: DiskInterfaceType.SATA,
|
||||
smartStatus: DiskSmartStatus.OK,
|
||||
temperature: null,
|
||||
partitions: [{ name: 'sdb1', fsType: DiskFsType.XFS, size: 4000787030016 }],
|
||||
|
||||
const disks = await service.getDisks();
|
||||
|
||||
// All disks should default to not spinning when state is empty
|
||||
expect(disks).toHaveLength(mockDiskLayoutData.length);
|
||||
disks.forEach((disk) => {
|
||||
expect(disk.isSpinning).toBe(false);
|
||||
});
|
||||
expect(disks[2]).toMatchObject({
|
||||
id: 'OTHER-SERIAL-123',
|
||||
device: '/dev/sdc',
|
||||
interfaceType: DiskInterfaceType.UNKNOWN,
|
||||
smartStatus: DiskSmartStatus.UNKNOWN,
|
||||
temperature: null,
|
||||
partitions: [{ name: 'sdc1', fsType: DiskFsType.NTFS, size: 1000204886016 }],
|
||||
});
|
||||
|
||||
it('should handle trimmed serial numbers correctly', async () => {
|
||||
// Add disk with spaces in ID
|
||||
const disksWithSpaces = [...mockArrayDisks];
|
||||
disksWithSpaces[0] = {
|
||||
...disksWithSpaces[0],
|
||||
id: ' S4ENNF0N123456 ', // spaces around ID
|
||||
};
|
||||
|
||||
vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
|
||||
if (key === 'store.emhttp.disks') {
|
||||
return disksWithSpaces;
|
||||
}
|
||||
return defaultValue;
|
||||
});
|
||||
|
||||
const disks = await service.getDisks();
|
||||
const disk = disks.find((d) => d.id === 'S4ENNF0N123456');
|
||||
|
||||
expect(disk).toBeDefined();
|
||||
expect(disk?.isSpinning).toBe(false); // null becomes false
|
||||
});
|
||||
|
||||
it('should correctly map partitions to disks', async () => {
|
||||
const disks = await service.getDisks();
|
||||
|
||||
const disk1 = disks.find((d) => d.id === 'S4ENNF0N123456');
|
||||
expect(disk1?.partitions).toHaveLength(2);
|
||||
expect(disk1?.partitions[0]).toEqual({
|
||||
name: 'sda1',
|
||||
fsType: DiskFsType.VFAT,
|
||||
size: 536870912,
|
||||
});
|
||||
expect(disk1?.partitions[1]).toEqual({
|
||||
name: 'sda2',
|
||||
fsType: DiskFsType.EXT4,
|
||||
size: 511560000000,
|
||||
});
|
||||
|
||||
const disk2 = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
|
||||
expect(disk2?.partitions).toHaveLength(1);
|
||||
expect(disk2?.partitions[0]).toEqual({
|
||||
name: 'sdb1',
|
||||
fsType: DiskFsType.XFS,
|
||||
size: 4000787030016,
|
||||
});
|
||||
});
|
||||
|
||||
it('should use ConfigService to get state data', async () => {
|
||||
await service.getDisks();
|
||||
|
||||
// Verify we're accessing the state through ConfigService
|
||||
expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
|
||||
});
|
||||
|
||||
it('should handle empty disk layout or block devices', async () => {
|
||||
@@ -267,6 +471,31 @@ describe('DisksService', () => {
|
||||
});
|
||||
});
|
||||
|
||||
// --- Test getDisk ---
|
||||
describe('getDisk', () => {
|
||||
it('should return a specific disk by id', async () => {
|
||||
const disk = await service.getDisk('S4ENNF0N123456');
|
||||
|
||||
expect(disk).toBeDefined();
|
||||
expect(disk.id).toBe('S4ENNF0N123456');
|
||||
expect(disk.isSpinning).toBe(false); // null becomes false
|
||||
});
|
||||
|
||||
it('should return spinning disk correctly', async () => {
|
||||
const disk = await service.getDisk('WD-WCC7K7YL9876');
|
||||
|
||||
expect(disk).toBeDefined();
|
||||
expect(disk.id).toBe('WD-WCC7K7YL9876');
|
||||
expect(disk.isSpinning).toBe(true);
|
||||
});
|
||||
|
||||
it('should throw NotFoundException for non-existent disk', async () => {
|
||||
await expect(service.getDisk('NONEXISTENT')).rejects.toThrow(
|
||||
'Disk with id NONEXISTENT not found'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// --- Test getTemperature ---
|
||||
describe('getTemperature', () => {
|
||||
it('should return temperature for a disk', async () => {
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import { Injectable, NotFoundException } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import type { Systeminformation } from 'systeminformation';
|
||||
import { execa } from 'execa';
|
||||
import { blockDevices, diskLayout } from 'systeminformation';
|
||||
|
||||
import { ArrayDisk } from '@app/unraid-api/graph/resolvers/array/array.model.js';
|
||||
import {
|
||||
Disk,
|
||||
DiskFsType,
|
||||
@@ -14,6 +16,7 @@ import { batchProcess } from '@app/utils.js';
|
||||
|
||||
@Injectable()
|
||||
export class DisksService {
|
||||
constructor(private readonly configService: ConfigService) {}
|
||||
public async getTemperature(device: string): Promise<number | null> {
|
||||
try {
|
||||
const { stdout } = await execa('smartctl', ['-A', device]);
|
||||
@@ -51,7 +54,8 @@ export class DisksService {
|
||||
|
||||
private async parseDisk(
|
||||
disk: Systeminformation.DiskLayoutData,
|
||||
partitionsToParse: Systeminformation.BlockDevicesData[]
|
||||
partitionsToParse: Systeminformation.BlockDevicesData[],
|
||||
arrayDisks: ArrayDisk[]
|
||||
): Promise<Omit<Disk, 'temperature'>> {
|
||||
const partitions = partitionsToParse
|
||||
// Only get partitions from this disk
|
||||
@@ -115,6 +119,8 @@ export class DisksService {
|
||||
mappedInterfaceType = DiskInterfaceType.UNKNOWN;
|
||||
}
|
||||
|
||||
const arrayDisk = arrayDisks.find((d) => d.id.trim() === disk.serialNum.trim());
|
||||
|
||||
return {
|
||||
...disk,
|
||||
id: disk.serialNum, // Ensure id is set
|
||||
@@ -123,6 +129,7 @@ export class DisksService {
|
||||
DiskSmartStatus.UNKNOWN,
|
||||
interfaceType: mappedInterfaceType,
|
||||
partitions,
|
||||
isSpinning: arrayDisk?.isSpinning ?? false,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -133,9 +140,9 @@ export class DisksService {
|
||||
const partitions = await blockDevices().then((devices) =>
|
||||
devices.filter((device) => device.type === 'part')
|
||||
);
|
||||
|
||||
const arrayDisks = this.configService.get<ArrayDisk[]>('store.emhttp.disks', []);
|
||||
const { data } = await batchProcess(await diskLayout(), async (disk) =>
|
||||
this.parseDisk(disk, partitions)
|
||||
this.parseDisk(disk, partitions, arrayDisks)
|
||||
);
|
||||
return data;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,47 @@
|
||||
import { Injectable, Logger, OnApplicationBootstrap } from '@nestjs/common';
|
||||
import { SchedulerRegistry, Timeout } from '@nestjs/schedule';
|
||||
|
||||
import { CronJob } from 'cron';
|
||||
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
|
||||
|
||||
@Injectable()
|
||||
export class ContainerStatusJob implements OnApplicationBootstrap {
|
||||
private readonly logger = new Logger(ContainerStatusJob.name);
|
||||
constructor(
|
||||
private readonly dockerManifestService: DockerManifestService,
|
||||
private readonly schedulerRegistry: SchedulerRegistry,
|
||||
private readonly dockerConfigService: DockerConfigService
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Initialize cron job for refreshing the update status for all containers on a user-configurable schedule.
|
||||
*/
|
||||
onApplicationBootstrap() {
|
||||
if (!this.dockerConfigService.enabled()) return;
|
||||
const cronExpression = this.dockerConfigService.getConfig().updateCheckCronSchedule;
|
||||
const cronJob = CronJob.from({
|
||||
cronTime: cronExpression,
|
||||
onTick: () => {
|
||||
this.dockerManifestService.refreshDigests().catch((error) => {
|
||||
this.logger.warn(error, 'Failed to refresh container update status');
|
||||
});
|
||||
},
|
||||
start: true,
|
||||
});
|
||||
this.schedulerRegistry.addCronJob(ContainerStatusJob.name, cronJob);
|
||||
this.logger.verbose(
|
||||
`Initialized cron job for refreshing container update status: ${ContainerStatusJob.name}`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh container digests 5 seconds after application start.
|
||||
*/
|
||||
@Timeout(5_000)
|
||||
async refreshContainerDigestsAfterStartup() {
|
||||
if (!this.dockerConfigService.enabled()) return;
|
||||
await this.dockerManifestService.refreshDigests();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
import { Field, ObjectType } from '@nestjs/graphql';
|
||||
|
||||
@ObjectType()
|
||||
export class DockerConfig {
|
||||
@Field(() => String)
|
||||
updateCheckCronSchedule!: string;
|
||||
}
|
||||
@@ -0,0 +1,195 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { CronExpression } from '@nestjs/schedule';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { ValidationError } from 'class-validator';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
|
||||
vi.mock('cron', () => ({
|
||||
validateCronExpression: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('@app/unraid-api/graph/resolvers/validation.utils.js', () => ({
|
||||
validateObject: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('DockerConfigService - validate', () => {
|
||||
let service: DockerConfigService;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
DockerConfigService,
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: {
|
||||
get: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<DockerConfigService>(DockerConfigService);
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('validate', () => {
|
||||
it('should validate and return docker config for valid cron expression', async () => {
|
||||
const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
|
||||
const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
const { validateCronExpression } = await import('cron');
|
||||
|
||||
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
|
||||
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
|
||||
|
||||
const result = await service.validate(inputConfig);
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
|
||||
expect(result).toBe(validatedConfig);
|
||||
});
|
||||
|
||||
it('should validate and return docker config for predefined cron expression', async () => {
|
||||
const inputConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };
|
||||
const validatedConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
const { validateCronExpression } = await import('cron');
|
||||
|
||||
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
|
||||
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
|
||||
|
||||
const result = await service.validate(inputConfig);
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
expect(validateCronExpression).toHaveBeenCalledWith(CronExpression.EVERY_DAY_AT_6AM);
|
||||
expect(result).toBe(validatedConfig);
|
||||
});
|
||||
|
||||
it('should throw AppError for invalid cron expression', async () => {
|
||||
const inputConfig = { updateCheckCronSchedule: 'invalid-cron' };
|
||||
const validatedConfig = { updateCheckCronSchedule: 'invalid-cron' };
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
const { validateCronExpression } = await import('cron');
|
||||
|
||||
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
|
||||
vi.mocked(validateCronExpression).mockReturnValue({ valid: false });
|
||||
|
||||
await expect(service.validate(inputConfig)).rejects.toThrow(
|
||||
new AppError('Cron expression not supported: invalid-cron')
|
||||
);
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
expect(validateCronExpression).toHaveBeenCalledWith('invalid-cron');
|
||||
});
|
||||
|
||||
it('should throw AppError for empty cron expression', async () => {
|
||||
const inputConfig = { updateCheckCronSchedule: '' };
|
||||
const validatedConfig = { updateCheckCronSchedule: '' };
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
const { validateCronExpression } = await import('cron');
|
||||
|
||||
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
|
||||
vi.mocked(validateCronExpression).mockReturnValue({ valid: false });
|
||||
|
||||
await expect(service.validate(inputConfig)).rejects.toThrow(
|
||||
new AppError('Cron expression not supported: ')
|
||||
);
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
expect(validateCronExpression).toHaveBeenCalledWith('');
|
||||
});
|
||||
|
||||
it('should throw AppError for malformed cron expression', async () => {
|
||||
const inputConfig = { updateCheckCronSchedule: '* * * *' };
|
||||
const validatedConfig = { updateCheckCronSchedule: '* * * *' };
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
const { validateCronExpression } = await import('cron');
|
||||
|
||||
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
|
||||
vi.mocked(validateCronExpression).mockReturnValue({ valid: false });
|
||||
|
||||
await expect(service.validate(inputConfig)).rejects.toThrow(
|
||||
new AppError('Cron expression not supported: * * * *')
|
||||
);
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
expect(validateCronExpression).toHaveBeenCalledWith('* * * *');
|
||||
});
|
||||
|
||||
it('should propagate validation errors from validateObject', async () => {
|
||||
const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
|
||||
const validationError = new ValidationError();
|
||||
validationError.property = 'updateCheckCronSchedule';
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
|
||||
vi.mocked(validateObject).mockRejectedValue(validationError);
|
||||
|
||||
await expect(service.validate(inputConfig)).rejects.toThrow();
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
});
|
||||
|
||||
it('should handle complex valid cron expressions', async () => {
|
||||
const inputConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };
|
||||
const validatedConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
const { validateCronExpression } = await import('cron');
|
||||
|
||||
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
|
||||
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
|
||||
|
||||
const result = await service.validate(inputConfig);
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
expect(validateCronExpression).toHaveBeenCalledWith('0 0,12 * * 1-5');
|
||||
expect(result).toBe(validatedConfig);
|
||||
});
|
||||
|
||||
it('should handle input with extra properties', async () => {
|
||||
const inputConfig = {
|
||||
updateCheckCronSchedule: '0 6 * * *',
|
||||
extraProperty: 'should be ignored',
|
||||
};
|
||||
const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };
|
||||
|
||||
const { validateObject } = await import(
|
||||
'@app/unraid-api/graph/resolvers/validation.utils.js'
|
||||
);
|
||||
const { validateCronExpression } = await import('cron');
|
||||
|
||||
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
|
||||
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
|
||||
|
||||
const result = await service.validate(inputConfig);
|
||||
|
||||
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
|
||||
expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
|
||||
expect(result).toBe(validatedConfig);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,59 +1,45 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { CronExpression } from '@nestjs/schedule';
|
||||
|
||||
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
|
||||
import { validateCronExpression } from 'cron';
|
||||
|
||||
import { FeatureFlags } from '@app/consts.js';
|
||||
import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { DockerConfig } from '@app/unraid-api/graph/resolvers/docker/docker-config.model.js';
|
||||
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
|
||||
import {
|
||||
DEFAULT_ORGANIZER_ROOT_ID,
|
||||
DEFAULT_ORGANIZER_VIEW_ID,
|
||||
} from '@app/unraid-api/organizer/organizer.js';
|
||||
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
|
||||
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';
|
||||
|
||||
@Injectable()
|
||||
export class DockerConfigService extends ConfigFilePersister<OrganizerV1> {
|
||||
export class DockerConfigService extends ConfigFilePersister<DockerConfig> {
|
||||
constructor(configService: ConfigService) {
|
||||
super(configService);
|
||||
}
|
||||
|
||||
enabled(): boolean {
|
||||
return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
|
||||
}
|
||||
|
||||
configKey(): string {
|
||||
return 'dockerOrganizer';
|
||||
return 'docker';
|
||||
}
|
||||
|
||||
fileName(): string {
|
||||
return 'docker.organizer.json';
|
||||
return 'docker.config.json';
|
||||
}
|
||||
|
||||
defaultConfig(): OrganizerV1 {
|
||||
defaultConfig(): DockerConfig {
|
||||
return {
|
||||
version: 1,
|
||||
resources: {},
|
||||
views: {
|
||||
default: {
|
||||
id: DEFAULT_ORGANIZER_VIEW_ID,
|
||||
name: 'Default',
|
||||
root: DEFAULT_ORGANIZER_ROOT_ID,
|
||||
entries: {
|
||||
root: {
|
||||
type: 'folder',
|
||||
id: DEFAULT_ORGANIZER_ROOT_ID,
|
||||
name: 'Root',
|
||||
children: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM,
|
||||
};
|
||||
}
|
||||
|
||||
async validate(config: object): Promise<OrganizerV1> {
|
||||
const organizer = await validateObject(OrganizerV1, config);
|
||||
const { isValid, errors } = await validateOrganizerIntegrity(organizer);
|
||||
if (!isValid) {
|
||||
throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
|
||||
async validate(config: object): Promise<DockerConfig> {
|
||||
const dockerConfig = await validateObject(DockerConfig, config);
|
||||
const cronExpression = validateCronExpression(dockerConfig.updateCheckCronSchedule);
|
||||
if (!cronExpression.valid) {
|
||||
throw new AppError(`Cron expression not supported: ${dockerConfig.updateCheckCronSchedule}`);
|
||||
}
|
||||
return organizer;
|
||||
return dockerConfig;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,51 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { Mutation, Parent, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import { Resource } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, UsePermissions } from '@unraid/shared/use-permissions.directive.js';
|
||||
|
||||
import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
|
||||
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
|
||||
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
|
||||
@Resolver(() => DockerContainer)
|
||||
export class DockerContainerResolver {
|
||||
private readonly logger = new Logger(DockerContainerResolver.name);
|
||||
constructor(private readonly dockerManifestService: DockerManifestService) {}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.READ_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
})
|
||||
@ResolveField(() => Boolean, { nullable: true })
|
||||
public async isUpdateAvailable(@Parent() container: DockerContainer) {
|
||||
try {
|
||||
return await this.dockerManifestService.isUpdateAvailableCached(container.image);
|
||||
} catch (error) {
|
||||
this.logger.error(error);
|
||||
throw new AppError('Failed to read cached update status. See graphql-api.log for details.');
|
||||
}
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.READ_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
})
|
||||
@ResolveField(() => Boolean, { nullable: true })
|
||||
public async isRebuildReady(@Parent() container: DockerContainer) {
|
||||
return this.dockerManifestService.isRebuildReady(container.hostConfig?.networkMode);
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
})
|
||||
@Mutation(() => Boolean)
|
||||
public async refreshDockerDigests() {
|
||||
return this.dockerManifestService.refreshDigests();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,62 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
|
||||
import { AsyncMutex } from '@unraid/shared/util/processing.js';
|
||||
|
||||
import { docker } from '@app/core/utils/index.js';
|
||||
import {
|
||||
CachedStatusEntry,
|
||||
DockerPhpService,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
|
||||
|
||||
@Injectable()
|
||||
export class DockerManifestService {
|
||||
constructor(private readonly dockerPhpService: DockerPhpService) {}
|
||||
|
||||
private readonly refreshDigestsMutex = new AsyncMutex(() => {
|
||||
return this.dockerPhpService.refreshDigestsViaPhp();
|
||||
});
|
||||
|
||||
/**
|
||||
* Recomputes local/remote docker container digests and writes them to /var/lib/docker/unraid-update-status.json
|
||||
* @param mutex - Optional mutex to use for the operation. If not provided, a default mutex will be used.
|
||||
* @param dockerUpdatePath - Optional path to the DockerUpdate.php file. If not provided, the default path will be used.
|
||||
* @returns True if the digests were refreshed, false if the operation failed
|
||||
*/
|
||||
async refreshDigests(mutex = this.refreshDigestsMutex, dockerUpdatePath?: string) {
|
||||
return mutex.do(() => {
|
||||
return this.dockerPhpService.refreshDigestsViaPhp(dockerUpdatePath);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if an update is available for a given container image.
|
||||
* @param imageRef - The image reference to check, e.g. "unraid/baseimage:latest". If no tag is provided, "latest" is assumed, following the webgui's implementation.
|
||||
* @param cacheData read from /var/lib/docker/unraid-update-status.json by default
|
||||
* @returns True if an update is available, false if not, or null if the status is unknown
|
||||
*/
|
||||
async isUpdateAvailableCached(imageRef: string, cacheData?: Record<string, CachedStatusEntry>) {
|
||||
let taggedRef = imageRef;
|
||||
if (!taggedRef.includes(':')) taggedRef += ':latest';
|
||||
|
||||
cacheData ??= await this.dockerPhpService.readCachedUpdateStatus();
|
||||
const containerData = cacheData[taggedRef];
|
||||
if (!containerData) return null;
|
||||
return containerData.status?.toLowerCase() === 'true';
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a container is rebuild ready.
|
||||
* @param networkMode - The network mode of the container, e.g. "container:unraid/baseimage:latest".
|
||||
* @returns True if the container is rebuild ready, false if not
|
||||
*/
|
||||
async isRebuildReady(networkMode?: string) {
|
||||
if (!networkMode || !networkMode.startsWith('container:')) return false;
|
||||
const target = networkMode.slice('container:'.length);
|
||||
try {
|
||||
await docker.getContainer(target).inspect();
|
||||
return false;
|
||||
} catch {
|
||||
return true; // unresolved target -> ':???' equivalent
|
||||
}
|
||||
}
|
||||
}
|
||||
130
api/src/unraid-api/graph/resolvers/docker/docker-php.service.ts
Normal file
130
api/src/unraid-api/graph/resolvers/docker/docker-php.service.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { readFile } from 'fs/promises';
|
||||
|
||||
import { z } from 'zod';
|
||||
|
||||
import { phpLoader } from '@app/core/utils/plugins/php-loader.js';
|
||||
import {
|
||||
ExplicitStatusItem,
|
||||
UpdateStatus,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
|
||||
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
|
||||
|
||||
type StatusItem = { name: string; updateStatus: 0 | 1 | 2 | 3 };
|
||||
|
||||
/**
|
||||
* These types reflect the structure of the /var/lib/docker/unraid-update-status.json file,
|
||||
* which is not controlled by the Unraid API.
|
||||
*/
|
||||
const CachedStatusEntrySchema = z.object({
|
||||
/** sha256 digest - "sha256:..." */
|
||||
local: z.string(),
|
||||
/** sha256 digest - "sha256:..." */
|
||||
remote: z.string(),
|
||||
/** whether update is available (true), not available (false), or unknown (null) */
|
||||
status: z.enum(['true', 'false']).nullable(),
|
||||
});
|
||||
const CachedStatusSchema = z.record(z.string(), CachedStatusEntrySchema);
|
||||
export type CachedStatusEntry = z.infer<typeof CachedStatusEntrySchema>;
|
||||
|
||||
@Injectable()
|
||||
export class DockerPhpService {
|
||||
private readonly logger = new Logger(DockerPhpService.name);
|
||||
constructor() {}
|
||||
|
||||
/**
|
||||
* Reads JSON from a file containing cached update status.
|
||||
* If the file does not exist, an empty object is returned.
|
||||
* @param cacheFile
|
||||
* @returns
|
||||
*/
|
||||
async readCachedUpdateStatus(
|
||||
cacheFile = '/var/lib/docker/unraid-update-status.json'
|
||||
): Promise<Record<string, CachedStatusEntry>> {
|
||||
try {
|
||||
const cache = await readFile(cacheFile, 'utf8');
|
||||
const cacheData = JSON.parse(cache);
|
||||
const { success, data } = CachedStatusSchema.safeParse(cacheData);
|
||||
if (success) return data;
|
||||
this.logger.warn(cacheData, 'Invalid cached update status');
|
||||
return {};
|
||||
} catch (error) {
|
||||
this.logger.warn(error, 'Failed to read cached update status');
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
/**----------------------
|
||||
* Refresh Container Digests
|
||||
*------------------------**/
|
||||
|
||||
/**
|
||||
* Recomputes local/remote digests by triggering `DockerTemplates->getAllInfo(true)` via DockerUpdate.php
|
||||
* @param dockerUpdatePath - Path to the DockerUpdate.php file
|
||||
* @returns True if the digests were refreshed, false if the file is not found or the operation failed
|
||||
*/
|
||||
async refreshDigestsViaPhp(
|
||||
dockerUpdatePath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerUpdate.php'
|
||||
) {
|
||||
try {
|
||||
await phpLoader({
|
||||
file: dockerUpdatePath,
|
||||
method: 'GET',
|
||||
});
|
||||
return true;
|
||||
} catch {
|
||||
// ignore; offline may keep remote as 'undef'
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**----------------------
|
||||
* Parse Container Statuses
|
||||
*------------------------**/
|
||||
|
||||
private parseStatusesFromDockerPush(js: string): ExplicitStatusItem[] {
|
||||
const matches = parseDockerPushCalls(js);
|
||||
return matches.map(({ name, updateStatus }) => ({
|
||||
name,
|
||||
updateStatus: this.updateStatusToString(updateStatus as StatusItem['updateStatus']),
|
||||
}));
|
||||
}
|
||||
|
||||
private updateStatusToString(updateStatus: 0): UpdateStatus.UP_TO_DATE;
|
||||
private updateStatusToString(updateStatus: 1): UpdateStatus.UPDATE_AVAILABLE;
|
||||
private updateStatusToString(updateStatus: 2): UpdateStatus.REBUILD_READY;
|
||||
private updateStatusToString(updateStatus: 3): UpdateStatus.UNKNOWN;
|
||||
// prettier-ignore
|
||||
private updateStatusToString(updateStatus: StatusItem['updateStatus']): ExplicitStatusItem['updateStatus'];
|
||||
private updateStatusToString(
|
||||
updateStatus: StatusItem['updateStatus']
|
||||
): ExplicitStatusItem['updateStatus'] {
|
||||
switch (updateStatus) {
|
||||
case 0:
|
||||
return UpdateStatus.UP_TO_DATE;
|
||||
case 1:
|
||||
return UpdateStatus.UPDATE_AVAILABLE;
|
||||
case 2:
|
||||
return UpdateStatus.REBUILD_READY;
|
||||
default:
|
||||
return UpdateStatus.UNKNOWN;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the update statuses for all containers by triggering `DockerTemplates->getAllInfo(true)` via DockerContainers.php
|
||||
* @param dockerContainersPath - Path to the DockerContainers.php file
|
||||
* @returns The update statuses for all containers
|
||||
*/
|
||||
async getContainerUpdateStatuses(
|
||||
dockerContainersPath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerContainers.php'
|
||||
): Promise<ExplicitStatusItem[]> {
|
||||
const stdout = await phpLoader({
|
||||
file: dockerContainersPath,
|
||||
method: 'GET',
|
||||
});
|
||||
const parts = stdout.split('\0'); // [html, "docker.push(...)", busyFlag]
|
||||
const js = parts[1] || '';
|
||||
return this.parseStatusesFromDockerPush(js);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
|
||||
|
||||
/**
|
||||
* Note that these values propagate down to API consumers, so be aware of breaking changes.
|
||||
*/
|
||||
export enum UpdateStatus {
|
||||
UP_TO_DATE = 'UP_TO_DATE',
|
||||
UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
|
||||
REBUILD_READY = 'REBUILD_READY',
|
||||
UNKNOWN = 'UNKNOWN',
|
||||
}
|
||||
|
||||
registerEnumType(UpdateStatus, {
|
||||
name: 'UpdateStatus',
|
||||
description: 'Update status of a container.',
|
||||
});
|
||||
|
||||
@ObjectType()
|
||||
export class ExplicitStatusItem {
|
||||
@Field(() => String)
|
||||
name!: string;
|
||||
|
||||
@Field(() => UpdateStatus)
|
||||
updateStatus!: UpdateStatus;
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Field, ID, Int, ObjectType, registerEnumType } from '@nestjs/graphql';
|
||||
|
||||
import { Node } from '@unraid/shared/graphql.model.js';
|
||||
import { GraphQLJSON, GraphQLPort } from 'graphql-scalars';
|
||||
import { GraphQLBigInt, GraphQLJSON, GraphQLPort } from 'graphql-scalars';
|
||||
|
||||
export enum ContainerPortType {
|
||||
TCP = 'TCP',
|
||||
@@ -89,7 +89,10 @@ export class DockerContainer extends Node {
|
||||
@Field(() => [ContainerPort])
|
||||
ports!: ContainerPort[];
|
||||
|
||||
@Field(() => Int, { nullable: true, description: 'Total size of all the files in the container' })
|
||||
@Field(() => GraphQLBigInt, {
|
||||
nullable: true,
|
||||
description: 'Total size of all files in the container (in bytes)',
|
||||
})
|
||||
sizeRootFs?: number;
|
||||
|
||||
@Field(() => GraphQLJSON, { nullable: true })
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import { DockerEventService } from '@app/unraid-api/graph/resolvers/docker/docker-event.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
|
||||
import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
|
||||
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
|
||||
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
|
||||
|
||||
describe('DockerModule', () => {
|
||||
it('should compile the module', async () => {
|
||||
@@ -18,6 +19,8 @@ describe('DockerModule', () => {
|
||||
})
|
||||
.overrideProvider(DockerService)
|
||||
.useValue({ getDockerClient: vi.fn() })
|
||||
.overrideProvider(DockerOrganizerConfigService)
|
||||
.useValue({ getConfig: vi.fn() })
|
||||
.overrideProvider(DockerConfigService)
|
||||
.useValue({ getConfig: vi.fn() })
|
||||
.compile();
|
||||
@@ -61,6 +64,7 @@ describe('DockerModule', () => {
|
||||
DockerResolver,
|
||||
{ provide: DockerService, useValue: {} },
|
||||
{ provide: DockerOrganizerService, useValue: {} },
|
||||
{ provide: DockerPhpService, useValue: { getContainerUpdateStatuses: vi.fn() } },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
|
||||
@@ -1,22 +1,36 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { JobModule } from '@app/unraid-api/cron/job.module.js';
|
||||
import { ContainerStatusJob } from '@app/unraid-api/graph/resolvers/docker/container-status.job.js';
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import { DockerContainerResolver } from '@app/unraid-api/graph/resolvers/docker/docker-container.resolver.js';
|
||||
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
|
||||
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
|
||||
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
|
||||
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
|
||||
|
||||
@Module({
|
||||
imports: [JobModule],
|
||||
providers: [
|
||||
// Services
|
||||
DockerService,
|
||||
DockerConfigService,
|
||||
DockerOrganizerConfigService,
|
||||
DockerOrganizerService,
|
||||
DockerManifestService,
|
||||
DockerPhpService,
|
||||
DockerConfigService,
|
||||
// DockerEventService,
|
||||
|
||||
// Jobs
|
||||
ContainerStatusJob,
|
||||
|
||||
// Resolvers
|
||||
DockerResolver,
|
||||
DockerMutationsResolver,
|
||||
DockerContainerResolver,
|
||||
],
|
||||
exports: [DockerService],
|
||||
})
|
||||
|
||||
@@ -3,10 +3,18 @@ import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
|
||||
import { ContainerState, DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
|
||||
import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';
|
||||
|
||||
vi.mock('@app/unraid-api/utils/graphql-field-helper.js', () => ({
|
||||
GraphQLFieldHelper: {
|
||||
isFieldRequested: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('DockerResolver', () => {
|
||||
let resolver: DockerResolver;
|
||||
@@ -26,7 +34,13 @@ describe('DockerResolver', () => {
|
||||
{
|
||||
provide: DockerOrganizerService,
|
||||
useValue: {
|
||||
getResolvedOrganizer: vi.fn(),
|
||||
resolveOrganizer: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: DockerPhpService,
|
||||
useValue: {
|
||||
getContainerUpdateStatuses: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
@@ -34,6 +48,9 @@ describe('DockerResolver', () => {
|
||||
|
||||
resolver = module.get<DockerResolver>(DockerResolver);
|
||||
dockerService = module.get<DockerService>(DockerService);
|
||||
|
||||
// Reset mocks before each test
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
@@ -73,9 +90,75 @@ describe('DockerResolver', () => {
|
||||
},
|
||||
];
|
||||
vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
|
||||
vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(false);
|
||||
|
||||
const result = await resolver.containers(false);
|
||||
const mockInfo = {} as any;
|
||||
|
||||
const result = await resolver.containers(false, mockInfo);
|
||||
expect(result).toEqual(mockContainers);
|
||||
expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false });
|
||||
expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
|
||||
expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: false });
|
||||
});
|
||||
|
||||
it('should request size when sizeRootFs field is requested', async () => {
|
||||
const mockContainers: DockerContainer[] = [
|
||||
{
|
||||
id: '1',
|
||||
autoStart: false,
|
||||
command: 'test',
|
||||
names: ['test-container'],
|
||||
created: 1234567890,
|
||||
image: 'test-image',
|
||||
imageId: 'test-image-id',
|
||||
ports: [],
|
||||
sizeRootFs: 1024000,
|
||||
state: ContainerState.EXITED,
|
||||
status: 'Exited',
|
||||
},
|
||||
];
|
||||
vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
|
||||
vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(true);
|
||||
|
||||
const mockInfo = {} as any;
|
||||
|
||||
const result = await resolver.containers(false, mockInfo);
|
||||
expect(result).toEqual(mockContainers);
|
||||
expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
|
||||
expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: true });
|
||||
});
|
||||
|
||||
it('should request size when GraphQLFieldHelper indicates sizeRootFs is requested', async () => {
|
||||
const mockContainers: DockerContainer[] = [];
|
||||
vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
|
||||
vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(true);
|
||||
|
||||
const mockInfo = {} as any;
|
||||
|
||||
await resolver.containers(false, mockInfo);
|
||||
expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
|
||||
expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: true });
|
||||
});
|
||||
|
||||
it('should not request size when GraphQLFieldHelper indicates sizeRootFs is not requested', async () => {
|
||||
const mockContainers: DockerContainer[] = [];
|
||||
vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
|
||||
vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(false);
|
||||
|
||||
const mockInfo = {} as any;
|
||||
|
||||
await resolver.containers(false, mockInfo);
|
||||
expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
|
||||
expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: false });
|
||||
});
|
||||
|
||||
it('should handle skipCache parameter', async () => {
|
||||
const mockContainers: DockerContainer[] = [];
|
||||
vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
|
||||
vi.mocked(GraphQLFieldHelper.isFieldRequested).mockReturnValue(false);
|
||||
|
||||
const mockInfo = {} as any;
|
||||
|
||||
await resolver.containers(true, mockInfo);
|
||||
expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: true, size: false });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,23 +1,29 @@
|
||||
import { Args, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
import { Args, Info, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import type { GraphQLResolveInfo } from 'graphql';
|
||||
import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
|
||||
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';
|
||||
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
|
||||
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
|
||||
import { ExplicitStatusItem } from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
|
||||
import {
|
||||
Docker,
|
||||
DockerContainer,
|
||||
DockerNetwork,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
|
||||
import { DEFAULT_ORGANIZER_ROOT_ID } from '@app/unraid-api/organizer/organizer.js';
|
||||
import { OrganizerV1, ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
|
||||
import { ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
|
||||
import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';
|
||||
|
||||
@Resolver(() => Docker)
|
||||
export class DockerResolver {
|
||||
constructor(
|
||||
private readonly dockerService: DockerService,
|
||||
private readonly dockerOrganizerService: DockerOrganizerService
|
||||
private readonly dockerOrganizerService: DockerOrganizerService,
|
||||
private readonly dockerPhpService: DockerPhpService
|
||||
) {}
|
||||
|
||||
@UsePermissions({
|
||||
@@ -37,9 +43,11 @@ export class DockerResolver {
|
||||
})
|
||||
@ResolveField(() => [DockerContainer])
|
||||
public async containers(
|
||||
@Args('skipCache', { defaultValue: false, type: () => Boolean }) skipCache: boolean
|
||||
@Args('skipCache', { defaultValue: false, type: () => Boolean }) skipCache: boolean,
|
||||
@Info() info: GraphQLResolveInfo
|
||||
) {
|
||||
return this.dockerService.getContainers({ skipCache });
|
||||
const requestsSize = GraphQLFieldHelper.isFieldRequested(info, 'sizeRootFs');
|
||||
return this.dockerService.getContainers({ skipCache, size: requestsSize });
|
||||
}
|
||||
|
||||
@UsePermissions({
|
||||
@@ -53,6 +61,7 @@ export class DockerResolver {
|
||||
return this.dockerService.getNetworks({ skipCache });
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.READ_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
@@ -62,6 +71,7 @@ export class DockerResolver {
|
||||
return this.dockerOrganizerService.resolveOrganizer();
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
@@ -80,6 +90,7 @@ export class DockerResolver {
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
@@ -96,6 +107,7 @@ export class DockerResolver {
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
@@ -108,6 +120,7 @@ export class DockerResolver {
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
@@ -123,4 +136,14 @@ export class DockerResolver {
|
||||
});
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@UsePermissions({
|
||||
action: AuthAction.READ_ANY,
|
||||
resource: Resource.DOCKER,
|
||||
})
|
||||
@ResolveField(() => [ExplicitStatusItem])
|
||||
public async containerUpdateStatuses() {
|
||||
return this.dockerPhpService.getContainerUpdateStatuses();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -109,6 +109,65 @@ describe('DockerService', () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
it('should use separate cache keys for containers with and without size', async () => {
|
||||
const mockContainersWithoutSize = [
|
||||
{
|
||||
Id: 'abc123',
|
||||
Names: ['/test-container'],
|
||||
Image: 'test-image',
|
||||
ImageID: 'test-image-id',
|
||||
Command: 'test',
|
||||
Created: 1234567890,
|
||||
State: 'exited',
|
||||
Status: 'Exited',
|
||||
Ports: [],
|
||||
Labels: {},
|
||||
HostConfig: { NetworkMode: 'bridge' },
|
||||
NetworkSettings: {},
|
||||
Mounts: [],
|
||||
},
|
||||
];
|
||||
|
||||
const mockContainersWithSize = [
|
||||
{
|
||||
Id: 'abc123',
|
||||
Names: ['/test-container'],
|
||||
Image: 'test-image',
|
||||
ImageID: 'test-image-id',
|
||||
Command: 'test',
|
||||
Created: 1234567890,
|
||||
State: 'exited',
|
||||
Status: 'Exited',
|
||||
Ports: [],
|
||||
Labels: {},
|
||||
HostConfig: { NetworkMode: 'bridge' },
|
||||
NetworkSettings: {},
|
||||
Mounts: [],
|
||||
SizeRootFs: 1024000,
|
||||
},
|
||||
];
|
||||
|
||||
// First call without size
|
||||
mockListContainers.mockResolvedValue(mockContainersWithoutSize);
|
||||
mockCacheManager.get.mockResolvedValue(undefined);
|
||||
|
||||
await service.getContainers({ size: false });
|
||||
|
||||
expect(mockCacheManager.set).toHaveBeenCalledWith('docker_containers', expect.any(Array), 60000);
|
||||
|
||||
// Second call with size
|
||||
mockListContainers.mockResolvedValue(mockContainersWithSize);
|
||||
mockCacheManager.get.mockResolvedValue(undefined);
|
||||
|
||||
await service.getContainers({ size: true });
|
||||
|
||||
expect(mockCacheManager.set).toHaveBeenCalledWith(
|
||||
'docker_containers_with_size',
|
||||
expect.any(Array),
|
||||
60000
|
||||
);
|
||||
});
|
||||
|
||||
it('should get containers', async () => {
|
||||
const mockContainers = [
|
||||
{
|
||||
@@ -159,7 +218,7 @@ describe('DockerService', () => {
|
||||
|
||||
expect(mockListContainers).toHaveBeenCalledWith({
|
||||
all: true,
|
||||
size: true,
|
||||
size: false,
|
||||
});
|
||||
expect(mockCacheManager.set).toHaveBeenCalled(); // Ensure cache is set
|
||||
});
|
||||
|
||||
@@ -31,6 +31,7 @@ export class DockerService {
|
||||
private readonly logger = new Logger(DockerService.name);
|
||||
|
||||
public static readonly CONTAINER_CACHE_KEY = 'docker_containers';
|
||||
public static readonly CONTAINER_WITH_SIZE_CACHE_KEY = 'docker_containers_with_size';
|
||||
public static readonly NETWORK_CACHE_KEY = 'docker_networks';
|
||||
public static readonly CACHE_TTL_SECONDS = 60; // Cache for 60 seconds
|
||||
|
||||
@@ -71,6 +72,8 @@ export class DockerService {
|
||||
}
|
||||
|
||||
public transformContainer(container: Docker.ContainerInfo): DockerContainer {
|
||||
const sizeValue = (container as Docker.ContainerInfo & { SizeRootFs?: number }).SizeRootFs;
|
||||
|
||||
const transformed: DockerContainer = {
|
||||
id: container.Id,
|
||||
names: container.Names,
|
||||
@@ -86,7 +89,7 @@ export class DockerService {
|
||||
ContainerPortType[port.Type.toUpperCase() as keyof typeof ContainerPortType] ||
|
||||
ContainerPortType.TCP,
|
||||
})),
|
||||
sizeRootFs: undefined,
|
||||
sizeRootFs: sizeValue,
|
||||
labels: container.Labels ?? {},
|
||||
state:
|
||||
typeof container.State === 'string'
|
||||
@@ -109,21 +112,23 @@ export class DockerService {
|
||||
{
|
||||
skipCache = false,
|
||||
all = true,
|
||||
size = true,
|
||||
size = false,
|
||||
...listOptions
|
||||
}: Partial<ContainerListingOptions> = { skipCache: false }
|
||||
): Promise<DockerContainer[]> {
|
||||
const cacheKey = size
|
||||
? DockerService.CONTAINER_WITH_SIZE_CACHE_KEY
|
||||
: DockerService.CONTAINER_CACHE_KEY;
|
||||
|
||||
if (!skipCache) {
|
||||
const cachedContainers = await this.cacheManager.get<DockerContainer[]>(
|
||||
DockerService.CONTAINER_CACHE_KEY
|
||||
);
|
||||
const cachedContainers = await this.cacheManager.get<DockerContainer[]>(cacheKey);
|
||||
if (cachedContainers) {
|
||||
this.logger.debug('Using docker container cache');
|
||||
this.logger.debug(`Using docker container cache (${size ? 'with' : 'without'} size)`);
|
||||
return cachedContainers;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug('Updating docker container cache');
|
||||
this.logger.debug(`Updating docker container cache (${size ? 'with' : 'without'} size)`);
|
||||
const rawContainers =
|
||||
(await this.client
|
||||
.listContainers({
|
||||
@@ -136,11 +141,7 @@ export class DockerService {
|
||||
this.autoStarts = await this.getAutoStarts();
|
||||
const containers = rawContainers.map((container) => this.transformContainer(container));
|
||||
|
||||
await this.cacheManager.set(
|
||||
DockerService.CONTAINER_CACHE_KEY,
|
||||
containers,
|
||||
DockerService.CACHE_TTL_SECONDS * 1000
|
||||
);
|
||||
await this.cacheManager.set(cacheKey, containers, DockerService.CACHE_TTL_SECONDS * 1000);
|
||||
return containers;
|
||||
}
|
||||
|
||||
@@ -191,15 +192,18 @@ export class DockerService {
|
||||
}
|
||||
|
||||
public async clearContainerCache(): Promise<void> {
|
||||
await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
|
||||
this.logger.debug('Invalidated container cache due to external event.');
|
||||
await Promise.all([
|
||||
this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY),
|
||||
this.cacheManager.del(DockerService.CONTAINER_WITH_SIZE_CACHE_KEY),
|
||||
]);
|
||||
this.logger.debug('Invalidated container caches due to external event.');
|
||||
}
|
||||
|
||||
public async start(id: string): Promise<DockerContainer> {
|
||||
const container = this.client.getContainer(id);
|
||||
await container.start();
|
||||
await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
|
||||
this.logger.debug(`Invalidated container cache after starting ${id}`);
|
||||
await this.clearContainerCache();
|
||||
this.logger.debug(`Invalidated container caches after starting ${id}`);
|
||||
const containers = await this.getContainers({ skipCache: true });
|
||||
const updatedContainer = containers.find((c) => c.id === id);
|
||||
if (!updatedContainer) {
|
||||
@@ -213,8 +217,8 @@ export class DockerService {
|
||||
public async stop(id: string): Promise<DockerContainer> {
|
||||
const container = this.client.getContainer(id);
|
||||
await container.stop({ t: 10 });
|
||||
await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
|
||||
this.logger.debug(`Invalidated container cache after stopping ${id}`);
|
||||
await this.clearContainerCache();
|
||||
this.logger.debug(`Invalidated container caches after stopping ${id}`);
|
||||
|
||||
let containers = await this.getContainers({ skipCache: true });
|
||||
let updatedContainer: DockerContainer | undefined;
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
|
||||
|
||||
import { FeatureFlags } from '@app/consts.js';
|
||||
import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
|
||||
import {
|
||||
DEFAULT_ORGANIZER_ROOT_ID,
|
||||
DEFAULT_ORGANIZER_VIEW_ID,
|
||||
} from '@app/unraid-api/organizer/organizer.js';
|
||||
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
|
||||
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';
|
||||
|
||||
@Injectable()
|
||||
export class DockerOrganizerConfigService extends ConfigFilePersister<OrganizerV1> {
|
||||
constructor(configService: ConfigService) {
|
||||
super(configService);
|
||||
}
|
||||
|
||||
enabled(): boolean {
|
||||
return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
|
||||
}
|
||||
|
||||
configKey(): string {
|
||||
return 'dockerOrganizer';
|
||||
}
|
||||
|
||||
fileName(): string {
|
||||
return 'docker.organizer.json';
|
||||
}
|
||||
|
||||
defaultConfig(): OrganizerV1 {
|
||||
return {
|
||||
version: 1,
|
||||
resources: {},
|
||||
views: {
|
||||
default: {
|
||||
id: DEFAULT_ORGANIZER_VIEW_ID,
|
||||
name: 'Default',
|
||||
root: DEFAULT_ORGANIZER_ROOT_ID,
|
||||
entries: {
|
||||
root: {
|
||||
type: 'folder',
|
||||
id: DEFAULT_ORGANIZER_ROOT_ID,
|
||||
name: 'Root',
|
||||
children: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async validate(config: object): Promise<OrganizerV1> {
|
||||
const organizer = await validateObject(OrganizerV1, config);
|
||||
const { isValid, errors } = await validateOrganizerIntegrity(organizer);
|
||||
if (!isValid) {
|
||||
throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
|
||||
}
|
||||
return organizer;
|
||||
}
|
||||
}
|
||||
@@ -2,17 +2,17 @@ import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import {
|
||||
containerToResource,
|
||||
DockerOrganizerService,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import {
|
||||
ContainerPortType,
|
||||
ContainerState,
|
||||
DockerContainer,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
|
||||
import {
|
||||
containerToResource,
|
||||
DockerOrganizerService,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
|
||||
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
|
||||
|
||||
describe('containerToResource', () => {
|
||||
@@ -138,7 +138,7 @@ describe('containerToResource', () => {
|
||||
|
||||
describe('DockerOrganizerService', () => {
|
||||
let service: DockerOrganizerService;
|
||||
let configService: DockerConfigService;
|
||||
let configService: DockerOrganizerConfigService;
|
||||
let dockerService: DockerService;
|
||||
|
||||
const mockOrganizer: OrganizerV1 = {
|
||||
@@ -178,7 +178,7 @@ describe('DockerOrganizerService', () => {
|
||||
providers: [
|
||||
DockerOrganizerService,
|
||||
{
|
||||
provide: DockerConfigService,
|
||||
provide: DockerOrganizerConfigService,
|
||||
useValue: {
|
||||
getConfig: vi.fn().mockImplementation(() => structuredClone(mockOrganizer)),
|
||||
validate: vi.fn().mockImplementation((config) => Promise.resolve(config)),
|
||||
@@ -220,7 +220,7 @@ describe('DockerOrganizerService', () => {
|
||||
}).compile();
|
||||
|
||||
service = moduleRef.get<DockerOrganizerService>(DockerOrganizerService);
|
||||
configService = moduleRef.get<DockerConfigService>(DockerConfigService);
|
||||
configService = moduleRef.get<DockerOrganizerConfigService>(DockerOrganizerConfigService);
|
||||
dockerService = moduleRef.get<DockerService>(DockerService);
|
||||
});
|
||||
|
||||
@@ -3,9 +3,9 @@ import { Injectable, Logger } from '@nestjs/common';
|
||||
import type { ContainerListOptions } from 'dockerode';
|
||||
|
||||
import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
|
||||
import {
|
||||
addMissingResourcesToView,
|
||||
createFolderInView,
|
||||
@@ -47,7 +47,7 @@ export function containerListToResourcesObject(containers: DockerContainer[]): O
|
||||
export class DockerOrganizerService {
|
||||
private readonly logger = new Logger(DockerOrganizerService.name);
|
||||
constructor(
|
||||
private readonly dockerConfigService: DockerConfigService,
|
||||
private readonly dockerConfigService: DockerOrganizerConfigService,
|
||||
private readonly dockerService: DockerService
|
||||
) {}
|
||||
|
||||
@@ -0,0 +1,124 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import type { DockerPushMatch } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
|
||||
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
|
||||
|
||||
describe('parseDockerPushCalls', () => {
|
||||
it('should extract name and update status from valid docker.push call', () => {
|
||||
const jsCode = "docker.push({name:'nginx',update:1});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
|
||||
});
|
||||
|
||||
it('should handle multiple docker.push calls in same string', () => {
|
||||
const jsCode = `
|
||||
docker.push({name:'nginx',update:1});
|
||||
docker.push({name:'mysql',update:0});
|
||||
docker.push({name:'redis',update:2});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'nginx', updateStatus: 1 },
|
||||
{ name: 'mysql', updateStatus: 0 },
|
||||
{ name: 'redis', updateStatus: 2 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle docker.push calls with additional properties', () => {
|
||||
const jsCode =
|
||||
"docker.push({id:'123',name:'nginx',version:'latest',update:3,status:'running'});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'nginx', updateStatus: 3 }]);
|
||||
});
|
||||
|
||||
it('should handle different property order', () => {
|
||||
const jsCode = "docker.push({update:2,name:'postgres',id:'456'});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'postgres', updateStatus: 2 }]);
|
||||
});
|
||||
|
||||
it('should handle container names with special characters', () => {
|
||||
const jsCode = "docker.push({name:'my-app_v2.0',update:1});";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'my-app_v2.0', updateStatus: 1 }]);
|
||||
});
|
||||
|
||||
it('should handle whitespace variations', () => {
|
||||
const jsCode = "docker.push({ name: 'nginx' , update: 1 });";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
|
||||
});
|
||||
|
||||
it('should return empty array for empty string', () => {
|
||||
const result = parseDockerPushCalls('');
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when no docker.push calls found', () => {
|
||||
const jsCode = "console.log('no docker calls here');";
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should ignore malformed docker.push calls', () => {
|
||||
const jsCode = `
|
||||
docker.push({name:'valid',update:1});
|
||||
docker.push({name:'missing-update'});
|
||||
docker.push({update:2});
|
||||
docker.push({name:'another-valid',update:0});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'valid', updateStatus: 1 },
|
||||
{ name: 'another-valid', updateStatus: 0 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle all valid update status values', () => {
|
||||
const jsCode = `
|
||||
docker.push({name:'container0',update:0});
|
||||
docker.push({name:'container1',update:1});
|
||||
docker.push({name:'container2',update:2});
|
||||
docker.push({name:'container3',update:3});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'container0', updateStatus: 0 },
|
||||
{ name: 'container1', updateStatus: 1 },
|
||||
{ name: 'container2', updateStatus: 2 },
|
||||
{ name: 'container3', updateStatus: 3 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle real-world example with HTML and multiple containers', () => {
|
||||
const jsCode = `
|
||||
<div>some html</div>
|
||||
docker.push({id:'abc123',name:'plex',version:'1.32',update:1,autostart:true});
|
||||
docker.push({id:'def456',name:'nextcloud',version:'latest',update:0,ports:'80:8080'});
|
||||
<script>more content</script>
|
||||
docker.push({id:'ghi789',name:'homeassistant',update:2});
|
||||
`;
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: 'plex', updateStatus: 1 },
|
||||
{ name: 'nextcloud', updateStatus: 0 },
|
||||
{ name: 'homeassistant', updateStatus: 2 },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle nested braces in other properties', () => {
|
||||
const jsCode = 'docker.push({config:\'{"nested":"value"}\',name:\'test\',update:1});';
|
||||
const result = parseDockerPushCalls(jsCode);
|
||||
|
||||
expect(result).toEqual([{ name: 'test', updateStatus: 1 }]);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,24 @@
|
||||
export interface DockerPushMatch {
|
||||
name: string;
|
||||
updateStatus: number;
|
||||
}
|
||||
|
||||
export function parseDockerPushCalls(jsCode: string): DockerPushMatch[] {
|
||||
const dockerPushRegex = /docker\.push\(\{[^}]*(?:(?:[^{}]|{[^}]*})*)\}\);/g;
|
||||
const matches: DockerPushMatch[] = [];
|
||||
|
||||
for (const match of jsCode.matchAll(dockerPushRegex)) {
|
||||
const objectContent = match[0];
|
||||
|
||||
const nameMatch = objectContent.match(/name\s*:\s*'([^']+)'/);
|
||||
const updateMatch = objectContent.match(/update\s*:\s*(\d)/);
|
||||
|
||||
if (nameMatch && updateMatch) {
|
||||
const name = nameMatch[1];
|
||||
const updateStatus = Number(updateMatch[1]);
|
||||
matches.push({ name, updateStatus });
|
||||
}
|
||||
}
|
||||
|
||||
return matches;
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Injectable, Logger, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
|
||||
import { Injectable, Logger, OnApplicationBootstrap, OnModuleDestroy } from '@nestjs/common';
|
||||
import crypto from 'crypto';
|
||||
import { ChildProcess } from 'node:child_process';
|
||||
import { mkdir, rm, writeFile } from 'node:fs/promises';
|
||||
@@ -7,6 +7,7 @@ import { dirname, join } from 'node:path';
|
||||
import { execa } from 'execa';
|
||||
import got, { HTTPError } from 'got';
|
||||
import pRetry from 'p-retry';
|
||||
import semver from 'semver';
|
||||
|
||||
import { sanitizeParams } from '@app/core/log.js';
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
@@ -25,7 +26,7 @@ import {
|
||||
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
|
||||
|
||||
@Injectable()
|
||||
export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
|
||||
export class RCloneApiService implements OnApplicationBootstrap, OnModuleDestroy {
|
||||
private isInitialized: boolean = false;
|
||||
private readonly logger = new Logger(RCloneApiService.name);
|
||||
private rcloneSocketPath: string = '';
|
||||
@@ -44,7 +45,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
|
||||
return this.isInitialized;
|
||||
}
|
||||
|
||||
async onModuleInit(): Promise<void> {
|
||||
async onApplicationBootstrap(): Promise<void> {
|
||||
// RClone startup disabled - early return
|
||||
if (ENVIRONMENT === 'production') {
|
||||
this.logger.debug('RClone startup is disabled');
|
||||
@@ -239,12 +240,41 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the RClone binary is available on the system
|
||||
* Checks if the RClone binary is available on the system and meets minimum version requirements
|
||||
*/
|
||||
private async checkRcloneBinaryExists(): Promise<boolean> {
|
||||
try {
|
||||
await execa('rclone', ['version']);
|
||||
this.logger.debug('RClone binary is available on the system.');
|
||||
const result = await execa('rclone', ['version']);
|
||||
const versionOutput = result.stdout.trim();
|
||||
|
||||
// Extract raw version string (format: "rclone vX.XX.X" or "rclone vX.XX.X-beta.X")
|
||||
const versionMatch = versionOutput.match(/rclone v([\d.\-\w]+)/);
|
||||
if (!versionMatch) {
|
||||
this.logger.error('Unable to parse RClone version from output');
|
||||
return false;
|
||||
}
|
||||
|
||||
const rawVersion = versionMatch[1];
|
||||
|
||||
// Use semver.coerce to get base semver from prerelease versions
|
||||
const coercedVersion = semver.coerce(rawVersion);
|
||||
if (!coercedVersion) {
|
||||
this.logger.error(`Failed to parse RClone version: raw="${rawVersion}"`);
|
||||
return false;
|
||||
}
|
||||
|
||||
const minimumVersion = '1.70.0';
|
||||
|
||||
if (!semver.gte(coercedVersion, minimumVersion)) {
|
||||
this.logger.error(
|
||||
`RClone version ${rawVersion} (coerced: ${coercedVersion}) is too old. Minimum required version is ${minimumVersion}`
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
this.logger.debug(
|
||||
`RClone binary is available on the system (version ${rawVersion}, coerced: ${coercedVersion}).`
|
||||
);
|
||||
return true;
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
|
||||
@@ -0,0 +1,216 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import * as client from 'openid-client';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
|
||||
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
|
||||
|
||||
vi.mock('openid-client');
|
||||
|
||||
describe('OidcClientConfigService - Cache Behavior', () => {
|
||||
let service: OidcClientConfigService;
|
||||
let validationService: OidcValidationService;
|
||||
|
||||
const createMockProvider = (port: number): OidcProvider => ({
|
||||
id: 'test-provider',
|
||||
name: 'Test Provider',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'test-secret',
|
||||
issuer: `http://localhost:${port}`,
|
||||
scopes: ['openid', 'profile', 'email'],
|
||||
authorizationRules: [],
|
||||
});
|
||||
|
||||
const createMockConfiguration = (port: number) => {
|
||||
const mockConfig = {
|
||||
serverMetadata: vi.fn(() => ({
|
||||
issuer: `http://localhost:${port}`,
|
||||
authorization_endpoint: `http://localhost:${port}/auth`,
|
||||
token_endpoint: `http://localhost:${port}/token`,
|
||||
jwks_uri: `http://localhost:${port}/jwks`,
|
||||
userinfo_endpoint: `http://localhost:${port}/userinfo`,
|
||||
})),
|
||||
};
|
||||
return mockConfig as unknown as client.Configuration;
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
const mockConfigService = {
|
||||
get: vi.fn(),
|
||||
set: vi.fn(),
|
||||
};
|
||||
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
OidcClientConfigService,
|
||||
OidcValidationService,
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: mockConfigService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<OidcClientConfigService>(OidcClientConfigService);
|
||||
validationService = module.get<OidcValidationService>(OidcValidationService);
|
||||
});
|
||||
|
||||
describe('Configuration Caching', () => {
|
||||
it('should cache configuration on first call', async () => {
|
||||
const provider = createMockProvider(1029);
|
||||
const mockConfig = createMockConfiguration(1029);
|
||||
|
||||
vi.spyOn(validationService, 'performDiscovery').mockResolvedValueOnce(mockConfig);
|
||||
|
||||
// First call
|
||||
const config1 = await service.getOrCreateConfig(provider);
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
|
||||
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
|
||||
|
||||
// Second call with same provider ID should use cache
|
||||
const config2 = await service.getOrCreateConfig(provider);
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
|
||||
expect(config2).toBe(config1);
|
||||
});
|
||||
|
||||
it('should return stale cached configuration when issuer changes without cache clear', async () => {
|
||||
const provider1029 = createMockProvider(1029);
|
||||
const provider1030 = createMockProvider(1030);
|
||||
const mockConfig1029 = createMockConfiguration(1029);
|
||||
const mockConfig1030 = createMockConfiguration(1030);
|
||||
|
||||
vi.spyOn(validationService, 'performDiscovery')
|
||||
.mockResolvedValueOnce(mockConfig1029)
|
||||
.mockResolvedValueOnce(mockConfig1030);
|
||||
|
||||
// Initial configuration on port 1029
|
||||
const config1 = await service.getOrCreateConfig(provider1029);
|
||||
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
|
||||
expect(config1.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
|
||||
|
||||
// Update provider to port 1030 (simulating UI change)
|
||||
// Without clearing cache, it should still return the old cached config
|
||||
const config2 = await service.getOrCreateConfig(provider1030);
|
||||
|
||||
// THIS IS THE BUG: The service returns cached config for port 1029
|
||||
// even though the provider now has issuer on port 1030
|
||||
expect(config2.serverMetadata().issuer).toBe('http://localhost:1029');
|
||||
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
|
||||
|
||||
// performDiscovery should only be called once because cache is used
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should return fresh configuration after cache is cleared', async () => {
|
||||
const provider1029 = createMockProvider(1029);
|
||||
const provider1030 = createMockProvider(1030);
|
||||
const mockConfig1029 = createMockConfiguration(1029);
|
||||
const mockConfig1030 = createMockConfiguration(1030);
|
||||
|
||||
vi.spyOn(validationService, 'performDiscovery')
|
||||
.mockResolvedValueOnce(mockConfig1029)
|
||||
.mockResolvedValueOnce(mockConfig1030);
|
||||
|
||||
// Initial configuration on port 1029
|
||||
const config1 = await service.getOrCreateConfig(provider1029);
|
||||
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
|
||||
|
||||
// Clear cache for the provider
|
||||
service.clearCache(provider1030.id);
|
||||
|
||||
// Now it should fetch fresh config for port 1030
|
||||
const config2 = await service.getOrCreateConfig(provider1030);
|
||||
expect(config2.serverMetadata().issuer).toBe('http://localhost:1030');
|
||||
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
|
||||
|
||||
// performDiscovery should be called twice (once for each port)
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should clear all provider caches when clearCache is called without providerId', async () => {
|
||||
const provider1 = { ...createMockProvider(1029), id: 'provider1' };
|
||||
const provider2 = { ...createMockProvider(1030), id: 'provider2' };
|
||||
const mockConfig1 = createMockConfiguration(1029);
|
||||
const mockConfig2 = createMockConfiguration(1030);
|
||||
|
||||
vi.spyOn(validationService, 'performDiscovery')
|
||||
.mockResolvedValueOnce(mockConfig1)
|
||||
.mockResolvedValueOnce(mockConfig2)
|
||||
.mockResolvedValueOnce(mockConfig1)
|
||||
.mockResolvedValueOnce(mockConfig2);
|
||||
|
||||
// Cache both providers
|
||||
await service.getOrCreateConfig(provider1);
|
||||
await service.getOrCreateConfig(provider2);
|
||||
expect(service.getCacheSize()).toBe(2);
|
||||
|
||||
// Clear all caches
|
||||
service.clearCache();
|
||||
expect(service.getCacheSize()).toBe(0);
|
||||
|
||||
// Both should fetch fresh configs
|
||||
await service.getOrCreateConfig(provider1);
|
||||
await service.getOrCreateConfig(provider2);
|
||||
|
||||
// performDiscovery should be called 4 times total
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(4);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Manual Configuration Caching', () => {
|
||||
it('should cache manual configuration and exhibit same stale cache issue', async () => {
|
||||
const provider1029: OidcProvider = {
|
||||
id: 'manual-provider',
|
||||
name: 'Manual Provider',
|
||||
clientId: 'client-id',
|
||||
clientSecret: 'secret',
|
||||
issuer: '',
|
||||
authorizationEndpoint: 'http://localhost:1029/auth',
|
||||
tokenEndpoint: 'http://localhost:1029/token',
|
||||
scopes: ['openid'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
const provider1030: OidcProvider = {
|
||||
...provider1029,
|
||||
authorizationEndpoint: 'http://localhost:1030/auth',
|
||||
tokenEndpoint: 'http://localhost:1030/token',
|
||||
};
|
||||
|
||||
// Mock the client.Configuration constructor for manual configs
|
||||
const mockManualConfig1029 = createMockConfiguration(1029);
|
||||
const mockManualConfig1030 = createMockConfiguration(1030);
|
||||
|
||||
let configCallCount = 0;
|
||||
vi.mocked(client.Configuration).mockImplementation(() => {
|
||||
configCallCount++;
|
||||
return configCallCount === 1 ? mockManualConfig1029 : mockManualConfig1030;
|
||||
});
|
||||
|
||||
vi.mocked(client.ClientSecretPost).mockReturnValue({} as any);
|
||||
vi.mocked(client.allowInsecureRequests).mockImplementation(() => {});
|
||||
|
||||
// First call with port 1029
|
||||
const config1 = await service.getOrCreateConfig(provider1029);
|
||||
expect(config1.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
|
||||
|
||||
// Update to port 1030 without clearing cache
|
||||
const config2 = await service.getOrCreateConfig(provider1030);
|
||||
|
||||
// BUG: Still returns cached config with port 1029
|
||||
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
|
||||
|
||||
// Clear cache and try again
|
||||
service.clearCache(provider1030.id);
|
||||
const config3 = await service.getOrCreateConfig(provider1030);
|
||||
|
||||
// Now it should return the updated config
|
||||
expect(config3.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,11 +1,11 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { forwardRef, Module } from '@nestjs/common';
|
||||
|
||||
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
|
||||
import { OidcRedirectUriService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-redirect-uri.service.js';
|
||||
import { OidcBaseModule } from '@app/unraid-api/graph/resolvers/sso/core/oidc-base.module.js';
|
||||
|
||||
@Module({
|
||||
imports: [OidcBaseModule],
|
||||
imports: [forwardRef(() => OidcBaseModule)],
|
||||
providers: [OidcClientConfigService, OidcRedirectUriService],
|
||||
exports: [OidcClientConfigService, OidcRedirectUriService],
|
||||
})
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { forwardRef, Module } from '@nestjs/common';
|
||||
|
||||
import { UserSettingsModule } from '@unraid/shared/services/user-settings.js';
|
||||
|
||||
import { OidcClientModule } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client.module.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
|
||||
|
||||
@Module({
|
||||
imports: [UserSettingsModule],
|
||||
imports: [UserSettingsModule, forwardRef(() => OidcClientModule)],
|
||||
providers: [OidcConfigPersistence, OidcValidationService],
|
||||
exports: [OidcConfigPersistence, OidcValidationService],
|
||||
})
|
||||
|
||||
@@ -0,0 +1,276 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test } from '@nestjs/testing';
|
||||
import * as fs from 'fs/promises';
|
||||
|
||||
import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
|
||||
import * as client from 'openid-client';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
|
||||
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
|
||||
|
||||
vi.mock('openid-client');
|
||||
vi.mock('fs/promises', () => ({
|
||||
writeFile: vi.fn().mockResolvedValue(undefined),
|
||||
mkdir: vi.fn().mockResolvedValue(undefined),
|
||||
stat: vi.fn().mockRejectedValue(new Error('File not found')),
|
||||
}));
|
||||
|
||||
describe('OIDC Config Cache Fix - Integration Test', () => {
|
||||
let configPersistence: OidcConfigPersistence;
|
||||
let clientConfigService: OidcClientConfigService;
|
||||
let mockConfigService: any;
|
||||
|
||||
afterEach(() => {
|
||||
delete process.env.PATHS_CONFIG;
|
||||
});
|
||||
|
||||
const createMockProvider = (port: number): OidcProvider => ({
|
||||
id: 'test-provider',
|
||||
name: 'Test Provider',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'test-secret',
|
||||
issuer: `http://localhost:${port}`,
|
||||
scopes: ['openid', 'profile', 'email'],
|
||||
authorizationRules: [
|
||||
{
|
||||
claim: 'email',
|
||||
operator: 'endsWith' as any,
|
||||
value: ['@example.com'],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const createMockConfiguration = (port: number) => {
|
||||
const mockConfig = {
|
||||
serverMetadata: vi.fn(() => ({
|
||||
issuer: `http://localhost:${port}`,
|
||||
authorization_endpoint: `http://localhost:${port}/auth`,
|
||||
token_endpoint: `http://localhost:${port}/token`,
|
||||
jwks_uri: `http://localhost:${port}/jwks`,
|
||||
userinfo_endpoint: `http://localhost:${port}/userinfo`,
|
||||
})),
|
||||
};
|
||||
return mockConfig as unknown as client.Configuration;
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Set environment variable for config path
|
||||
process.env.PATHS_CONFIG = '/tmp/test-config';
|
||||
|
||||
mockConfigService = {
|
||||
get: vi.fn((key: string) => {
|
||||
if (key === 'oidc') {
|
||||
return {
|
||||
providers: [createMockProvider(1029)],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
}
|
||||
if (key === 'paths.config') {
|
||||
return '/tmp/test-config';
|
||||
}
|
||||
return undefined;
|
||||
}),
|
||||
set: vi.fn(),
|
||||
getOrThrow: vi.fn((key: string) => {
|
||||
if (key === 'paths.config' || key === 'paths') {
|
||||
return '/tmp/test-config';
|
||||
}
|
||||
return '/tmp/test-config';
|
||||
}),
|
||||
};
|
||||
|
||||
const mockUserSettingsService = {
|
||||
register: vi.fn(),
|
||||
getAllSettings: vi.fn(),
|
||||
getAllValues: vi.fn(),
|
||||
updateNamespacedValues: vi.fn(),
|
||||
};
|
||||
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
OidcConfigPersistence,
|
||||
OidcClientConfigService,
|
||||
OidcValidationService,
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: mockConfigService,
|
||||
},
|
||||
{
|
||||
provide: UserSettingsService,
|
||||
useValue: mockUserSettingsService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
configPersistence = module.get<OidcConfigPersistence>(OidcConfigPersistence);
|
||||
clientConfigService = module.get<OidcClientConfigService>(OidcClientConfigService);
|
||||
|
||||
// Mock the persist method since we don't want to write to disk in tests
|
||||
vi.spyOn(configPersistence as any, 'persist').mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
describe('Cache clearing on provider update', () => {
|
||||
it('should clear cache when provider is updated via upsertProvider', async () => {
|
||||
const provider1029 = createMockProvider(1029);
|
||||
const provider1030 = createMockProvider(1030);
|
||||
const mockConfig1029 = createMockConfiguration(1029);
|
||||
const mockConfig1030 = createMockConfiguration(1030);
|
||||
|
||||
// Mock validation service to return configs
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'performDiscovery')
|
||||
.mockResolvedValueOnce(mockConfig1029)
|
||||
.mockResolvedValueOnce(mockConfig1030);
|
||||
|
||||
// First, get config for port 1029 - this caches it
|
||||
const config1 = await clientConfigService.getOrCreateConfig(provider1029);
|
||||
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
|
||||
|
||||
// Spy on clearCache method
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
|
||||
// Update the provider to port 1030 via upsertProvider
|
||||
await configPersistence.upsertProvider(provider1030);
|
||||
|
||||
// Verify cache was cleared for this specific provider
|
||||
expect(clearCacheSpy).toHaveBeenCalledWith(provider1030.id);
|
||||
|
||||
// Now get config again - should fetch fresh config for port 1030
|
||||
const config2 = await clientConfigService.getOrCreateConfig(provider1030);
|
||||
expect(config2.serverMetadata().issuer).toBe('http://localhost:1030');
|
||||
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
|
||||
|
||||
// Verify discovery was called twice (not using cache)
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should clear cache when provider is deleted', async () => {
|
||||
const provider = createMockProvider(1029);
|
||||
const mockConfig = createMockConfiguration(1029);
|
||||
|
||||
// Setup initial provider in config
|
||||
mockConfigService.get.mockReturnValue({
|
||||
providers: [provider, { ...provider, id: 'other-provider' }],
|
||||
defaultAllowedOrigins: [],
|
||||
});
|
||||
|
||||
// Mock validation service
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'performDiscovery').mockResolvedValue(mockConfig);
|
||||
|
||||
// First, cache the provider config
|
||||
await clientConfigService.getOrCreateConfig(provider);
|
||||
|
||||
// Spy on clearCache
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
|
||||
// Delete the provider
|
||||
const deleted = await configPersistence.deleteProvider(provider.id);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
// Verify cache was cleared for the deleted provider
|
||||
expect(clearCacheSpy).toHaveBeenCalledWith(provider.id);
|
||||
});
|
||||
|
||||
it('should clear all provider caches when updated via settings updateValues', async () => {
|
||||
// This simulates what happens when settings are saved through the UI
|
||||
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
|
||||
|
||||
const newConfig = {
|
||||
providers: [
|
||||
{
|
||||
...createMockProvider(1030),
|
||||
authorizationMode: 'simple',
|
||||
simpleAuthorization: {
|
||||
allowedDomains: ['example.com'],
|
||||
allowedEmails: [],
|
||||
allowedUserIds: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
|
||||
// Spy on clearCache
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
|
||||
// Mock validation
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'validateProvider').mockResolvedValue({
|
||||
isValid: true,
|
||||
});
|
||||
|
||||
// Call the updateValues function (simulating saving settings from UI)
|
||||
await settingsCallback.updateValues(newConfig);
|
||||
|
||||
// Verify cache was cleared (called without arguments to clear all)
|
||||
expect(clearCacheSpy).toHaveBeenCalledWith();
|
||||
});
|
||||
|
||||
it('should NOT require API restart after updating provider issuer', async () => {
|
||||
// This test confirms that the fix eliminates the need for API restart
|
||||
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
|
||||
|
||||
const newConfig = {
|
||||
providers: [createMockProvider(1030)],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
|
||||
// Mock validation
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'validateProvider').mockResolvedValue({
|
||||
isValid: true,
|
||||
});
|
||||
|
||||
// Update settings
|
||||
const result = await settingsCallback.updateValues(newConfig);
|
||||
|
||||
// Verify that restartRequired is false
|
||||
expect(result.restartRequired).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Provider validation on save', () => {
|
||||
it('should validate providers and include warnings but still save', async () => {
|
||||
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
|
||||
|
||||
const newConfig = {
|
||||
providers: [
|
||||
createMockProvider(1030),
|
||||
{ ...createMockProvider(1031), id: 'invalid-provider', name: 'Invalid Provider' },
|
||||
],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
|
||||
// Mock validation - first provider valid, second invalid
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'validateProvider')
|
||||
.mockResolvedValueOnce({ isValid: true })
|
||||
.mockResolvedValueOnce({
|
||||
isValid: false,
|
||||
error: 'Discovery failed: Unable to reach issuer',
|
||||
});
|
||||
|
||||
// Update settings
|
||||
const result = await settingsCallback.updateValues(newConfig);
|
||||
|
||||
// Should save successfully but include warnings
|
||||
expect(result.restartRequired).toBe(false);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings).toContain(
|
||||
'❌ Invalid Provider: Discovery failed: Unable to reach issuer'
|
||||
);
|
||||
expect(result.values.providers).toHaveLength(2);
|
||||
|
||||
// Cache should still be cleared even with validation warnings
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
await settingsCallback.updateValues(newConfig);
|
||||
expect(clearCacheSpy).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { forwardRef, Inject, Injectable, Optional } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import { RuleEffect } from '@jsonforms/core';
|
||||
@@ -6,6 +6,7 @@ import { mergeSettingSlices } from '@unraid/shared/jsonforms/settings.js';
|
||||
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
|
||||
import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
|
||||
|
||||
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
|
||||
import {
|
||||
AuthorizationOperator,
|
||||
@@ -30,7 +31,10 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
constructor(
|
||||
configService: ConfigService,
|
||||
private readonly userSettings: UserSettingsService,
|
||||
private readonly validationService: OidcValidationService
|
||||
private readonly validationService: OidcValidationService,
|
||||
@Optional()
|
||||
@Inject(forwardRef(() => OidcClientConfigService))
|
||||
private readonly clientConfigService?: OidcClientConfigService
|
||||
) {
|
||||
super(configService);
|
||||
this.registerSettings();
|
||||
@@ -252,6 +256,15 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
this.configService.set(this.configKey(), newConfig);
|
||||
await this.persist(newConfig);
|
||||
|
||||
// Clear the OIDC client configuration cache when a provider is updated
|
||||
// This ensures the new issuer/endpoints are used immediately
|
||||
if (this.clientConfigService) {
|
||||
this.clientConfigService.clearCache(cleanedProvider.id);
|
||||
this.logger.debug(
|
||||
`Cleared OIDC client configuration cache for provider ${cleanedProvider.id}`
|
||||
);
|
||||
}
|
||||
|
||||
return cleanedProvider;
|
||||
}
|
||||
|
||||
@@ -328,6 +341,12 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
this.configService.set(this.configKey(), newConfig);
|
||||
await this.persist(newConfig);
|
||||
|
||||
// Clear the cache for the deleted provider
|
||||
if (this.clientConfigService) {
|
||||
this.clientConfigService.clearCache(id);
|
||||
this.logger.debug(`Cleared OIDC client configuration cache for deleted provider ${id}`);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -440,6 +459,13 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
this.configService.set(this.configKey(), processedConfig);
|
||||
await this.persist(processedConfig);
|
||||
|
||||
// Clear the OIDC client configuration cache to ensure fresh discovery
|
||||
// This fixes the issue where changing issuer URLs requires API restart
|
||||
if (this.clientConfigService) {
|
||||
this.clientConfigService.clearCache();
|
||||
this.logger.debug('Cleared OIDC client configuration cache after provider update');
|
||||
}
|
||||
|
||||
// Include validation results in response
|
||||
const response: { restartRequired: boolean; values: OidcConfig; warnings?: string[] } = {
|
||||
restartRequired: false,
|
||||
|
||||
350
api/src/unraid-api/rest/rest.service.test.ts
Normal file
350
api/src/unraid-api/rest/rest.service.test.ts
Normal file
@@ -0,0 +1,350 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import type { ReadStream, Stats } from 'node:fs';
|
||||
import { createReadStream } from 'node:fs';
|
||||
import { stat, writeFile } from 'node:fs/promises';
|
||||
import { Readable } from 'node:stream';
|
||||
|
||||
import { execa, ExecaError } from 'execa';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import type { ApiReportData } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import {
|
||||
getBannerPathIfPresent,
|
||||
getCasePathIfPresent,
|
||||
} from '@app/core/utils/images/image-file-helpers.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { RestService } from '@app/unraid-api/rest/rest.service.js';
|
||||
|
||||
vi.mock('node:fs');
|
||||
vi.mock('node:fs/promises');
|
||||
vi.mock('execa');
|
||||
vi.mock('@app/store/index.js');
|
||||
vi.mock('@app/core/utils/images/image-file-helpers.js', () => ({
|
||||
getBannerPathIfPresent: vi.fn(),
|
||||
getCasePathIfPresent: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('RestService', () => {
|
||||
let service: RestService;
|
||||
let apiReportService: ApiReportService;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
RestService,
|
||||
{
|
||||
provide: ApiReportService,
|
||||
useValue: {
|
||||
generateReport: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<RestService>(RestService);
|
||||
apiReportService = module.get<ApiReportService>(ApiReportService);
|
||||
});
|
||||
|
||||
describe('getLogs', () => {
|
||||
const mockLogPath = '/usr/local/emhttp/logs/unraid-api';
|
||||
const mockGraphqlApiLog = '/var/log/graphql-api.log';
|
||||
const mockZipPath = '/usr/local/emhttp/logs/unraid-api.tar.gz';
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(getters).paths = vi.fn().mockReturnValue({
|
||||
'log-base': mockLogPath,
|
||||
});
|
||||
// Mock saveApiReport to avoid side effects
|
||||
vi.spyOn(service as any, 'saveApiReport').mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
it('should create and return log archive successfully', async () => {
|
||||
const mockStream: ReadStream = Readable.from([]) as ReadStream;
|
||||
vi.mocked(stat).mockImplementation((path) => {
|
||||
if (path === mockLogPath || path === mockZipPath) {
|
||||
return Promise.resolve({ isFile: () => true } as unknown as Stats);
|
||||
}
|
||||
return Promise.reject(new Error('File not found'));
|
||||
});
|
||||
vi.mocked(execa).mockResolvedValue({
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
exitCode: 0,
|
||||
} as any);
|
||||
vi.mocked(createReadStream).mockReturnValue(mockStream);
|
||||
|
||||
const result = await service.getLogs();
|
||||
|
||||
expect(execa).toHaveBeenCalledWith('tar', ['-czf', mockZipPath, mockLogPath], {
|
||||
timeout: 60000,
|
||||
reject: true,
|
||||
});
|
||||
expect(createReadStream).toHaveBeenCalledWith(mockZipPath);
|
||||
expect(result).toBe(mockStream);
|
||||
});
|
||||
|
||||
it('should include graphql-api.log when it exists', async () => {
|
||||
vi.mocked(stat).mockImplementation((path) => {
|
||||
if (path === mockLogPath || path === mockGraphqlApiLog || path === mockZipPath) {
|
||||
return Promise.resolve({ isFile: () => true } as unknown as Stats);
|
||||
}
|
||||
return Promise.reject(new Error('File not found'));
|
||||
});
|
||||
vi.mocked(execa).mockResolvedValue({
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
exitCode: 0,
|
||||
} as any);
|
||||
vi.mocked(createReadStream).mockReturnValue(Readable.from([]) as ReadStream);
|
||||
|
||||
await service.getLogs();
|
||||
|
||||
expect(execa).toHaveBeenCalledWith(
|
||||
'tar',
|
||||
['-czf', mockZipPath, mockLogPath, mockGraphqlApiLog],
|
||||
{
|
||||
timeout: 60000,
|
||||
reject: true,
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle timeout errors with detailed message', async () => {
|
||||
vi.mocked(stat).mockImplementation((path) => {
|
||||
if (path === mockLogPath) {
|
||||
return Promise.resolve({ isFile: () => true } as unknown as Stats);
|
||||
}
|
||||
return Promise.reject(new Error('File not found'));
|
||||
});
|
||||
|
||||
const timeoutError = new Error('Command timed out') as ExecaError;
|
||||
timeoutError.timedOut = true;
|
||||
timeoutError.command =
|
||||
'tar -czf /usr/local/emhttp/logs/unraid-api.tar.gz /usr/local/emhttp/logs/unraid-api';
|
||||
timeoutError.exitCode = undefined;
|
||||
timeoutError.stderr = '';
|
||||
timeoutError.stdout = '';
|
||||
|
||||
vi.mocked(execa).mockRejectedValue(timeoutError);
|
||||
|
||||
await expect(service.getLogs()).rejects.toThrow('Tar command timed out after 60 seconds');
|
||||
});
|
||||
|
||||
it('should handle command failure with exit code and stderr', async () => {
|
||||
vi.mocked(stat).mockImplementation((path) => {
|
||||
if (path === mockLogPath) {
|
||||
return Promise.resolve({ isFile: () => true } as unknown as Stats);
|
||||
}
|
||||
return Promise.reject(new Error('File not found'));
|
||||
});
|
||||
|
||||
const execError = new Error('Command failed') as ExecaError;
|
||||
execError.exitCode = 1;
|
||||
execError.command =
|
||||
'tar -czf /usr/local/emhttp/logs/unraid-api.tar.gz /usr/local/emhttp/logs/unraid-api';
|
||||
execError.stderr = 'tar: Cannot create archive';
|
||||
execError.stdout = '';
|
||||
execError.shortMessage = 'Command failed with exit code 1';
|
||||
|
||||
vi.mocked(execa).mockRejectedValue(execError);
|
||||
|
||||
await expect(service.getLogs()).rejects.toThrow('Tar command failed with exit code 1');
|
||||
await expect(service.getLogs()).rejects.toThrow('tar: Cannot create archive');
|
||||
});
|
||||
|
||||
it('should handle case when tar succeeds but zip file is not created', async () => {
|
||||
vi.mocked(stat).mockImplementation((path) => {
|
||||
if (path === mockLogPath) {
|
||||
return Promise.resolve({ isFile: () => true } as unknown as Stats);
|
||||
}
|
||||
// Zip file doesn't exist after tar command
|
||||
return Promise.reject(new Error('File not found'));
|
||||
});
|
||||
vi.mocked(execa).mockResolvedValue({
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
exitCode: 0,
|
||||
} as any);
|
||||
|
||||
await expect(service.getLogs()).rejects.toThrow(
|
||||
'Failed to create log zip - tar file not found after successful command'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw error when log path does not exist', async () => {
|
||||
vi.mocked(stat).mockRejectedValue(new Error('File not found'));
|
||||
|
||||
await expect(service.getLogs()).rejects.toThrow('No logs to download');
|
||||
});
|
||||
|
||||
it('should handle generic errors', async () => {
|
||||
vi.mocked(stat).mockImplementation((path) => {
|
||||
if (path === mockLogPath) {
|
||||
return Promise.resolve({ isFile: () => true } as unknown as Stats);
|
||||
}
|
||||
return Promise.reject(new Error('File not found'));
|
||||
});
|
||||
|
||||
const genericError = new Error('Unexpected error');
|
||||
vi.mocked(execa).mockRejectedValue(genericError);
|
||||
|
||||
await expect(service.getLogs()).rejects.toThrow(
|
||||
'Failed to create logs archive: Unexpected error'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors with stdout in addition to stderr', async () => {
|
||||
vi.mocked(stat).mockImplementation((path) => {
|
||||
if (path === mockLogPath) {
|
||||
return Promise.resolve({ isFile: () => true } as unknown as Stats);
|
||||
}
|
||||
return Promise.reject(new Error('File not found'));
|
||||
});
|
||||
|
||||
const execError = new Error('Command failed') as ExecaError;
|
||||
execError.exitCode = 1;
|
||||
execError.command =
|
||||
'tar -czf /usr/local/emhttp/logs/unraid-api.tar.gz /usr/local/emhttp/logs/unraid-api';
|
||||
execError.stderr = 'tar: Error';
|
||||
execError.stdout = 'Processing archive...';
|
||||
execError.shortMessage = 'Command failed with exit code 1';
|
||||
|
||||
vi.mocked(execa).mockRejectedValue(execError);
|
||||
|
||||
await expect(service.getLogs()).rejects.toThrow('Stdout: Processing archive');
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveApiReport', () => {
|
||||
it('should generate and save API report', async () => {
|
||||
const mockReport: ApiReportData = {
|
||||
timestamp: new Date().toISOString(),
|
||||
connectionStatus: { running: 'yes' },
|
||||
system: {
|
||||
name: 'Test Server',
|
||||
version: '6.12.0',
|
||||
machineId: 'test-machine-id',
|
||||
},
|
||||
connect: {
|
||||
installed: false,
|
||||
},
|
||||
config: {
|
||||
valid: true,
|
||||
},
|
||||
services: {
|
||||
cloud: null,
|
||||
minigraph: null,
|
||||
allServices: [],
|
||||
},
|
||||
};
|
||||
const mockPath = '/test/report.json';
|
||||
|
||||
vi.mocked(apiReportService.generateReport).mockResolvedValue(mockReport);
|
||||
vi.mocked(writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await service.saveApiReport(mockPath);
|
||||
|
||||
expect(apiReportService.generateReport).toHaveBeenCalled();
|
||||
expect(writeFile).toHaveBeenCalledWith(
|
||||
mockPath,
|
||||
JSON.stringify(mockReport, null, 2),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors when generating report', async () => {
|
||||
const mockPath = '/test/report.json';
|
||||
|
||||
vi.mocked(apiReportService.generateReport).mockRejectedValue(
|
||||
new Error('Report generation failed')
|
||||
);
|
||||
|
||||
// Should not throw, just log warning
|
||||
await expect(service.saveApiReport(mockPath)).resolves.toBeUndefined();
|
||||
expect(apiReportService.generateReport).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCustomizationPath', () => {
|
||||
it('should return banner path when type is banner', async () => {
|
||||
const mockBannerPath = '/path/to/banner.png';
|
||||
vi.mocked(getBannerPathIfPresent).mockResolvedValue(mockBannerPath);
|
||||
|
||||
const result = await service.getCustomizationPath('banner');
|
||||
|
||||
expect(getBannerPathIfPresent).toHaveBeenCalled();
|
||||
expect(result).toBe(mockBannerPath);
|
||||
});
|
||||
|
||||
it('should return case path when type is case', async () => {
|
||||
const mockCasePath = '/path/to/case.png';
|
||||
vi.mocked(getCasePathIfPresent).mockResolvedValue(mockCasePath);
|
||||
|
||||
const result = await service.getCustomizationPath('case');
|
||||
|
||||
expect(getCasePathIfPresent).toHaveBeenCalled();
|
||||
expect(result).toBe(mockCasePath);
|
||||
});
|
||||
|
||||
it('should return null when no banner found', async () => {
|
||||
vi.mocked(getBannerPathIfPresent).mockResolvedValue(null);
|
||||
|
||||
const result = await service.getCustomizationPath('banner');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null when no case found', async () => {
|
||||
vi.mocked(getCasePathIfPresent).mockResolvedValue(null);
|
||||
|
||||
const result = await service.getCustomizationPath('case');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCustomizationStream', () => {
|
||||
it('should return read stream for banner', async () => {
|
||||
const mockPath = '/path/to/banner.png';
|
||||
const mockStream: ReadStream = Readable.from([]) as ReadStream;
|
||||
|
||||
vi.mocked(getBannerPathIfPresent).mockResolvedValue(mockPath);
|
||||
vi.mocked(createReadStream).mockReturnValue(mockStream);
|
||||
|
||||
const result = await service.getCustomizationStream('banner');
|
||||
|
||||
expect(getBannerPathIfPresent).toHaveBeenCalled();
|
||||
expect(createReadStream).toHaveBeenCalledWith(mockPath);
|
||||
expect(result).toBe(mockStream);
|
||||
});
|
||||
|
||||
it('should return read stream for case', async () => {
|
||||
const mockPath = '/path/to/case.png';
|
||||
const mockStream: ReadStream = Readable.from([]) as ReadStream;
|
||||
|
||||
vi.mocked(getCasePathIfPresent).mockResolvedValue(mockPath);
|
||||
vi.mocked(createReadStream).mockReturnValue(mockStream);
|
||||
|
||||
const result = await service.getCustomizationStream('case');
|
||||
|
||||
expect(getCasePathIfPresent).toHaveBeenCalled();
|
||||
expect(createReadStream).toHaveBeenCalledWith(mockPath);
|
||||
expect(result).toBe(mockStream);
|
||||
});
|
||||
|
||||
it('should throw error when no banner found', async () => {
|
||||
vi.mocked(getBannerPathIfPresent).mockResolvedValue(null);
|
||||
|
||||
await expect(service.getCustomizationStream('banner')).rejects.toThrow('No banner found');
|
||||
});
|
||||
|
||||
it('should throw error when no case found', async () => {
|
||||
vi.mocked(getCasePathIfPresent).mockResolvedValue(null);
|
||||
|
||||
await expect(service.getCustomizationStream('case')).rejects.toThrow('No case found');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -4,6 +4,7 @@ import { createReadStream } from 'node:fs';
|
||||
import { stat, writeFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import type { ExecaError } from 'execa';
|
||||
import { execa } from 'execa';
|
||||
|
||||
import {
|
||||
@@ -31,6 +32,8 @@ export class RestService {
|
||||
|
||||
async getLogs(): Promise<ReadStream> {
|
||||
const logPath = getters.paths()['log-base'];
|
||||
const graphqlApiLog = '/var/log/graphql-api.log';
|
||||
|
||||
try {
|
||||
await this.saveApiReport(join(logPath, 'report.json'));
|
||||
} catch (error) {
|
||||
@@ -41,16 +44,62 @@ export class RestService {
|
||||
const logPathExists = Boolean(await stat(logPath).catch(() => null));
|
||||
if (logPathExists) {
|
||||
try {
|
||||
await execa('tar', ['-czf', zipToWrite, logPath]);
|
||||
// Build tar command arguments
|
||||
const tarArgs = ['-czf', zipToWrite, logPath];
|
||||
|
||||
// Check if graphql-api.log exists and add it to the archive
|
||||
const graphqlLogExists = Boolean(await stat(graphqlApiLog).catch(() => null));
|
||||
if (graphqlLogExists) {
|
||||
tarArgs.push(graphqlApiLog);
|
||||
this.logger.debug('Including graphql-api.log in archive');
|
||||
}
|
||||
|
||||
// Execute tar with timeout and capture output
|
||||
await execa('tar', tarArgs, {
|
||||
timeout: 60000, // 60 seconds timeout for tar operation
|
||||
reject: true, // Throw on non-zero exit (default behavior)
|
||||
});
|
||||
|
||||
const tarFileExists = Boolean(await stat(zipToWrite).catch(() => null));
|
||||
|
||||
if (tarFileExists) {
|
||||
return createReadStream(zipToWrite);
|
||||
} else {
|
||||
throw new Error('Failed to create log zip');
|
||||
throw new Error(
|
||||
'Failed to create log zip - tar file not found after successful command'
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error('Failed to create logs');
|
||||
// Build detailed error message with execa's built-in error info
|
||||
let errorMessage = 'Failed to create logs archive';
|
||||
|
||||
if (error && typeof error === 'object' && 'command' in error) {
|
||||
const execaError = error as ExecaError;
|
||||
|
||||
if (execaError.timedOut) {
|
||||
errorMessage = `Tar command timed out after 60 seconds. Command: ${execaError.command}`;
|
||||
} else if (execaError.exitCode !== undefined) {
|
||||
errorMessage = `Tar command failed with exit code ${execaError.exitCode}. Command: ${execaError.command}`;
|
||||
}
|
||||
|
||||
// Add stderr/stdout if available
|
||||
if (execaError.stderr) {
|
||||
errorMessage += `. Stderr: ${execaError.stderr}`;
|
||||
}
|
||||
if (execaError.stdout) {
|
||||
errorMessage += `. Stdout: ${execaError.stdout}`;
|
||||
}
|
||||
|
||||
// Include the short message from execa
|
||||
if (execaError.shortMessage) {
|
||||
errorMessage += `. Details: ${execaError.shortMessage}`;
|
||||
}
|
||||
} else if (error instanceof Error) {
|
||||
errorMessage += `: ${error.message}`;
|
||||
}
|
||||
|
||||
this.logger.error(errorMessage, error);
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
} else {
|
||||
throw new Error('No logs to download');
|
||||
|
||||
@@ -0,0 +1,149 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import * as getUnraidVersionModule from '@app/common/dashboard/get-unraid-version.js';
|
||||
import { FileModification } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
|
||||
|
||||
vi.mock('@app/common/dashboard/get-unraid-version.js');
|
||||
|
||||
class TestFileModification extends FileModification {
|
||||
id = 'test';
|
||||
filePath = '/test/file';
|
||||
|
||||
protected async generatePatch(): Promise<string> {
|
||||
return 'test patch';
|
||||
}
|
||||
}
|
||||
|
||||
describe('FileModification', () => {
|
||||
let modification: TestFileModification;
|
||||
let getUnraidVersionMock: any;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
const logger = new Logger('TestFileModification');
|
||||
modification = new TestFileModification(logger);
|
||||
getUnraidVersionMock = vi.mocked(getUnraidVersionModule.getUnraidVersion);
|
||||
});
|
||||
|
||||
describe('version comparison methods', () => {
|
||||
describe('isUnraidVersionGreaterThanOrEqualTo', () => {
|
||||
it('should return true when current version is greater', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.3.0');
|
||||
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when current version is equal', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0');
|
||||
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when current version is less', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.1.0');
|
||||
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle prerelease versions correctly', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
|
||||
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0-beta.1');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should treat prerelease as greater than stable when base versions are equal', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
|
||||
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0', {
|
||||
includePrerelease: true,
|
||||
});
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should compare prerelease versions correctly', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.4');
|
||||
const result =
|
||||
await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0-beta.2.3');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle beta.2.3 being less than beta.2.4', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.3');
|
||||
const result =
|
||||
await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0-beta.2.4');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isUnraidVersionLessThanOrEqualTo', () => {
|
||||
it('should return true when current version is less', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.1.0');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when current version is equal', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when current version is greater', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.3.0');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle prerelease versions correctly', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.1');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should treat prerelease as less than stable when base versions are equal', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0', {
|
||||
includePrerelease: true,
|
||||
});
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should compare prerelease versions correctly', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.3');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.2.4');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle beta.2.3 being equal to beta.2.3', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.3');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.2.3');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle beta.2.4 being greater than beta.2.3', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.4');
|
||||
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.2.3');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('inverse relationship', () => {
|
||||
it('should have opposite results for greater-than-or-equal and less-than-or-equal when not equal', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.5');
|
||||
const gte = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
|
||||
const lte = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
|
||||
expect(gte).toBe(true);
|
||||
expect(lte).toBe(false);
|
||||
});
|
||||
|
||||
it('should both return true when versions are equal', async () => {
|
||||
getUnraidVersionMock.mockResolvedValue('7.2.0');
|
||||
const gte = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
|
||||
const lte = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
|
||||
expect(gte).toBe(true);
|
||||
expect(lte).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -5,7 +5,7 @@ import { access, readFile, unlink, writeFile } from 'fs/promises';
|
||||
import { basename, dirname, join } from 'path';
|
||||
|
||||
import { applyPatch, createPatch, parsePatch, reversePatch } from 'diff';
|
||||
import { coerce, compare, gte } from 'semver';
|
||||
import { coerce, compare, gte, lte } from 'semver';
|
||||
|
||||
import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version.js';
|
||||
|
||||
@@ -259,29 +259,53 @@ export abstract class FileModification {
|
||||
return patch;
|
||||
}
|
||||
|
||||
protected async isUnraidVersionGreaterThanOrEqualTo(
|
||||
version: string = '7.2.0', // Defaults to the version of Unraid that includes the API by default
|
||||
private async compareUnraidVersion(
|
||||
version: string,
|
||||
compareFn: typeof gte | typeof lte,
|
||||
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
|
||||
): Promise<boolean> {
|
||||
const unraidVersion = coerce(await getUnraidVersion(), { includePrerelease });
|
||||
const comparedVersion = coerce(version, { includePrerelease });
|
||||
|
||||
if (!unraidVersion) {
|
||||
throw new Error(`Failed to compare Unraid version - missing unraid version`);
|
||||
}
|
||||
if (!comparedVersion) {
|
||||
throw new Error(`Failed to compare Unraid version - missing comparison version`);
|
||||
}
|
||||
// If includePrerelease and base versions are equal, treat prerelease as greater
|
||||
|
||||
// Special handling for prerelease versions when base versions are equal
|
||||
if (includePrerelease) {
|
||||
const baseUnraid = `${unraidVersion.major}.${unraidVersion.minor}.${unraidVersion.patch}`;
|
||||
const baseCompared = `${comparedVersion.major}.${comparedVersion.minor}.${comparedVersion.patch}`;
|
||||
|
||||
if (baseUnraid === baseCompared) {
|
||||
// If unraidVersion has prerelease and comparedVersion does not, treat as greater
|
||||
if (unraidVersion.prerelease.length && !comparedVersion.prerelease.length) {
|
||||
return true;
|
||||
const unraidHasPrerelease = unraidVersion.prerelease.length > 0;
|
||||
const comparedHasPrerelease = comparedVersion.prerelease.length > 0;
|
||||
|
||||
// If one has prerelease and the other doesn't, handle specially
|
||||
if (unraidHasPrerelease && !comparedHasPrerelease) {
|
||||
// For gte: prerelease is considered greater than stable
|
||||
// For lte: prerelease is considered less than stable
|
||||
return compareFn === gte;
|
||||
}
|
||||
}
|
||||
}
|
||||
return gte(unraidVersion, comparedVersion);
|
||||
|
||||
return compareFn(unraidVersion, comparedVersion);
|
||||
}
|
||||
|
||||
protected async isUnraidVersionGreaterThanOrEqualTo(
|
||||
version: string = '7.2.0', // Defaults to the version of Unraid that includes the API by default
|
||||
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
|
||||
): Promise<boolean> {
|
||||
return this.compareUnraidVersion(version, gte, { includePrerelease });
|
||||
}
|
||||
|
||||
protected async isUnraidVersionLessThanOrEqualTo(
|
||||
version: string,
|
||||
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
|
||||
): Promise<boolean> {
|
||||
return this.compareUnraidVersion(version, lte, { includePrerelease });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,334 @@
|
||||
Menu="UserPreferences"
|
||||
Title="Display Settings"
|
||||
Icon="icon-display"
|
||||
Tag="desktop"
|
||||
---
|
||||
<?PHP
|
||||
/* Copyright 2005-2025, Lime Technology
|
||||
* Copyright 2012-2025, Bergware International.
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License version 2,
|
||||
* as published by the Free Software Foundation.
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*/
|
||||
?>
|
||||
<?
|
||||
$void = "<img src='/webGui/images/banner.png' id='image' width='330' height='30' onclick='$("#drop").click()' style='cursor:pointer' title='_(Click to select PNG file)_'>";
|
||||
$icon = "<i class='fa fa-trash top' title='_(Restore default image)_' onclick='restore()'></i>";
|
||||
$plugins = '/var/log/plugins';
|
||||
|
||||
require_once "$docroot/plugins/dynamix.plugin.manager/include/PluginHelpers.php";
|
||||
?>
|
||||
<script src="<?autov('/webGui/javascript/jquery.filedrop.js')?>"></script>
|
||||
<script>
|
||||
var path = '/boot/config/plugins/dynamix';
|
||||
var filename = '';
|
||||
var locale = "<?=$locale?>";
|
||||
|
||||
function restore() {
|
||||
// restore original image and activate APPLY button
|
||||
$('#dropbox').html("<?=$void?>");
|
||||
$('select[name="banner"]').trigger('change');
|
||||
filename = 'reset';
|
||||
}
|
||||
function upload(lang) {
|
||||
// save or delete upload when APPLY is pressed
|
||||
if (filename=='reset') {
|
||||
$.post("/webGui/include/FileUpload.php",{cmd:'delete',path:path,filename:'banner.png'});
|
||||
} else if (filename) {
|
||||
$.post("/webGui/include/FileUpload.php",{cmd:'save',path:path,filename:filename,output:'banner.png'});
|
||||
}
|
||||
// reset dashboard tiles when switching language
|
||||
if (lang != locale) {
|
||||
$.removeCookie('db-box1');
|
||||
$.removeCookie('db-box2');
|
||||
$.removeCookie('db-box3');
|
||||
$.removeCookie('inactive_content');
|
||||
$.removeCookie('hidden_content');
|
||||
}
|
||||
}
|
||||
function presetBanner(form) {
|
||||
if (form.banner.selectedIndex == 0) $('.js-bannerSettings').hide(); else $('.js-bannerSettings').show();
|
||||
}
|
||||
function presetRefresh(form) {
|
||||
for (var i=0,item; item=form.refresh.options[i]; i++) item.value *= -1;
|
||||
}
|
||||
function presetPassive(index) {
|
||||
if (index==0) $('#passive').hide(); else $('#passive').show();
|
||||
}
|
||||
function updateDirection(lang) {
|
||||
// var rtl = ['ar_AR','fa_FA'].includes(lang) ? "dir='rtl' " : "";
|
||||
// RTL display is not giving the desired results, we keep LTR
|
||||
var rtl = "";
|
||||
$('input[name="rtl"]').val(rtl);
|
||||
}
|
||||
|
||||
$(function() {
|
||||
var dropbox = $('#dropbox');
|
||||
// attach the drag-n-drop feature to the 'dropbox' element
|
||||
dropbox.filedrop({
|
||||
maxfiles:1,
|
||||
maxfilesize:512, // KB
|
||||
data: {"csrf_token": "<?=$var['csrf_token']?>"},
|
||||
url:'/webGui/include/FileUpload.php',
|
||||
beforeEach:function(file) {
|
||||
if (!file.type.match(/^image\/.*/)) {
|
||||
swal({title:"_(Warning)_",text:"_(Only PNG images are allowed)_!",type:"warning",html:true,confirmButtonText:"_(Ok)_"});
|
||||
return false;
|
||||
}
|
||||
},
|
||||
error: function(err, file, i) {
|
||||
switch (err) {
|
||||
case 'BrowserNotSupported':
|
||||
swal({title:"_(Browser error)_",text:"_(Your browser does not support HTML5 file uploads)_!",type:"error",html:true,confirmButtonText:"_(Ok)_"});
|
||||
break;
|
||||
case 'TooManyFiles':
|
||||
swal({title:"_(Too many files)_",text:"_(Please select one file only)_!",html:true,type:"error"});
|
||||
break;
|
||||
case 'FileTooLarge':
|
||||
swal({title:"_(File too large)_",text:"_(Maximum file upload size is 512K)_ (524,288 _(bytes)_)",type:"error",html:true,confirmButtonText:"_(Ok)_"});
|
||||
break;
|
||||
}
|
||||
},
|
||||
uploadStarted:function(i,file,count) {
|
||||
var image = $('img', $(dropbox));
|
||||
var reader = new FileReader();
|
||||
image.width = 330;
|
||||
image.height = 30;
|
||||
reader.onload = function(e){image.attr('src',e.target.result);};
|
||||
reader.readAsDataURL(file);
|
||||
},
|
||||
uploadFinished:function(i,file,response) {
|
||||
if (response == 'OK 200') {
|
||||
if (!filename || filename=='reset') $(dropbox).append("<?=$icon?>");
|
||||
$('select[name="banner"]').trigger('change');
|
||||
filename = file.name;
|
||||
} else {
|
||||
swal({title:"_(Upload error)_",text:response,type:"error",html:true,confirmButtonText:"_(Ok)_"});
|
||||
}
|
||||
}
|
||||
});
|
||||
// simulate a drop action when manual file selection is done
|
||||
$('#drop').bind('change', function(e) {
|
||||
var files = e.target.files;
|
||||
if ($('#dropbox').triggerHandler({type:'drop',dataTransfer:{files:files}})==false) e.stopImmediatePropagation();
|
||||
});
|
||||
presetBanner(document.display_settings);
|
||||
});
|
||||
</script>
|
||||
|
||||
:display_settings_help:
|
||||
|
||||
<form markdown="1" name="display_settings" method="POST" action="/update.php" target="progressFrame" onsubmit="upload(this.locale.value)">
|
||||
<input type="hidden" name="#file" value="dynamix/dynamix.cfg">
|
||||
<input type="hidden" name="#section" value="display">
|
||||
<input type="hidden" name="rtl" value="<?=$display['rtl']?>">
|
||||
|
||||
_(Display width)_:
|
||||
: <select name="width">
|
||||
<?=mk_option($display['width'], "",_('Boxed'))?>
|
||||
<?=mk_option($display['width'], "1",_('Unlimited'))?>
|
||||
</select>
|
||||
|
||||
:display_width_help:
|
||||
|
||||
_(Language)_:
|
||||
: <select name="locale" class="fixed" onchange="updateDirection(this.value)">
|
||||
<?echo mk_option($display['locale'], "","English");
|
||||
foreach (glob("$plugins/lang-*.xml",GLOB_NOSORT) as $xml_file) {
|
||||
$lang = language('Language', $xml_file);
|
||||
$home = language('LanguageLocal', $xml_file);
|
||||
$name = language('LanguagePack', $xml_file);
|
||||
echo mk_option($display['locale'], $name, "$home ($lang)");
|
||||
}
|
||||
?></select>
|
||||
|
||||
_(Font size)_:
|
||||
: <select name="font" id='font'>
|
||||
<?=mk_option($display['font'], "50",_('Very small'))?>
|
||||
<?=mk_option($display['font'], "56.25",_('Small'))?>
|
||||
<?=mk_option($display['font'], "",_('Normal'))?>
|
||||
<?=mk_option($display['font'], "68.75",_('Large'))?>
|
||||
<?=mk_option($display['font'], "75",_('Very large'))?>
|
||||
<?=mk_option($display['font'], "80",_('Huge'))?>
|
||||
</select>
|
||||
|
||||
:display_font_size_help:
|
||||
|
||||
_(Terminal font size)_:
|
||||
: <select name="tty" id="tty">
|
||||
<?=mk_option($display['tty'], "11",_('Very small'))?>
|
||||
<?=mk_option($display['tty'], "13",_('Small'))?>
|
||||
<?=mk_option($display['tty'], "15",_('Normal'))?>
|
||||
<?=mk_option($display['tty'], "17",_('Large'))?>
|
||||
<?=mk_option($display['tty'], "19",_('Very large'))?>
|
||||
<?=mk_option($display['tty'], "21",_('Huge'))?>
|
||||
</select>
|
||||
|
||||
:display_tty_size_help:
|
||||
|
||||
_(Number format)_:
|
||||
: <select name="number">
|
||||
<?=mk_option($display['number'], ".,",_('[D] dot : [G] comma'))?>
|
||||
<?=mk_option($display['number'], ". ",_('[D] dot : [G] space'))?>
|
||||
<?=mk_option($display['number'], ".",_('[D] dot : [G] none'))?>
|
||||
<?=mk_option($display['number'], ",.",_('[D] comma : [G] dot'))?>
|
||||
<?=mk_option($display['number'], ", ",_('[D] comma : [G] space'))?>
|
||||
<?=mk_option($display['number'], ",",_('[D] comma : [G] none'))?>
|
||||
</select>
|
||||
|
||||
_(Number scaling)_:
|
||||
: <select name="scale">
|
||||
<?=mk_option($display['scale'], "-1",_('Automatic'))?>
|
||||
<?=mk_option($display['scale'], "0",_('Disabled'))?>
|
||||
<?=mk_option($display['scale'], "1",_('KB'))?>
|
||||
<?=mk_option($display['scale'], "2",_('MB'))?>
|
||||
<?=mk_option($display['scale'], "3",_('GB'))?>
|
||||
<?=mk_option($display['scale'], "4",_('TB'))?>
|
||||
<?=mk_option($display['scale'], "5",_('PB'))?>
|
||||
</select>
|
||||
|
||||
_(Page view)_:
|
||||
: <select name="tabs">
|
||||
<?=mk_option($display['tabs'], "0",_('Tabbed'))?>
|
||||
<?=mk_option($display['tabs'], "1",_('Non-tabbed'))?>
|
||||
</select>
|
||||
|
||||
:display_page_view_help:
|
||||
|
||||
_(Placement of Users menu)_:
|
||||
: <select name="users">
|
||||
<?=mk_option($display['users'], "Tasks:3",_('Header menu'))?>
|
||||
<?=mk_option($display['users'], "UserPreferences",_('Settings menu'))?>
|
||||
</select>
|
||||
|
||||
:display_users_menu_help:
|
||||
|
||||
_(Listing height)_:
|
||||
: <select name="resize">
|
||||
<?=mk_option($display['resize'], "0",_('Automatic'))?>
|
||||
<?=mk_option($display['resize'], "1",_('Fixed'))?>
|
||||
</select>
|
||||
|
||||
:display_listing_height_help:
|
||||
|
||||
_(Display device name)_:
|
||||
: <select name="raw">
|
||||
<?=mk_option($display['raw'], "",_('Normalized'))?>
|
||||
<?=mk_option($display['raw'], "1",_('Raw'))?>
|
||||
</select>
|
||||
|
||||
_(Display world-wide-name in device ID)_:
|
||||
: <select name="wwn">
|
||||
<?=mk_option($display['wwn'], "0",_('Disabled'))?>
|
||||
<?=mk_option($display['wwn'], "1",_('Automatic'))?>
|
||||
</select>
|
||||
|
||||
:display_wwn_device_id_help:
|
||||
|
||||
_(Display array totals)_:
|
||||
: <select name="total">
|
||||
<?=mk_option($display['total'], "0",_('No'))?>
|
||||
<?=mk_option($display['total'], "1",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
_(Show array utilization indicator)_:
|
||||
: <select name="usage">
|
||||
<?=mk_option($display['usage'], "0",_('No'))?>
|
||||
<?=mk_option($display['usage'], "1",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
_(Temperature unit)_:
|
||||
: <select name="unit">
|
||||
<?=mk_option($display['unit'], "C",_('Celsius'))?>
|
||||
<?=mk_option($display['unit'], "F",_('Fahrenheit'))?>
|
||||
</select>
|
||||
|
||||
:display_temperature_unit_help:
|
||||
|
||||
_(Dynamix color theme)_:
|
||||
: <select name="theme">
|
||||
<?foreach (glob("$docroot/webGui/styles/themes/*.css") as $themes):?>
|
||||
<?$theme = basename($themes, '.css');?>
|
||||
<?=mk_option($display['theme'], $theme, _(ucfirst($theme)))?>
|
||||
<?endforeach;?>
|
||||
</select>
|
||||
|
||||
_(Used / Free columns)_:
|
||||
: <select name="text">
|
||||
<?=mk_option($display['text'], "0",_('Text'))?>
|
||||
<?=mk_option($display['text'], "1",_('Bar (gray)'))?>
|
||||
<?=mk_option($display['text'], "2",_('Bar (color)'))?>
|
||||
<?=mk_option($display['text'], "10",_('Text - Bar (gray)'))?>
|
||||
<?=mk_option($display['text'], "20",_('Text - Bar (color)'))?>
|
||||
<?=mk_option($display['text'], "11",_('Bar (gray) - Text'))?>
|
||||
<?=mk_option($display['text'], "21",_('Bar (color) - Text'))?>
|
||||
</select>
|
||||
|
||||
_(Header custom text color)_:
|
||||
: <input type="text" class="narrow" name="header" value="<?=$display['header']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
|
||||
|
||||
:display_custom_text_color_help:
|
||||
|
||||
_(Header custom secondary text color)_:
|
||||
: <input type="text" class="narrow" name="headermetacolor" value="<?=$display['headermetacolor']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
|
||||
|
||||
_(Header custom background color)_:
|
||||
: <input type="text" class="narrow" name="background" value="<?=$display['background']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
|
||||
|
||||
:display_custom_background_color_help:
|
||||
|
||||
_(Header show description)_:
|
||||
: <select name="headerdescription">
|
||||
<?=mk_option($display['headerdescription'], "yes",_('Yes'))?>
|
||||
<?=mk_option($display['headerdescription'], "no",_('No'))?>
|
||||
</select>
|
||||
|
||||
_(Show banner)_:
|
||||
: <select name="banner" onchange="presetBanner(this.form)">
|
||||
<?=mk_option($display['banner'], "",_('No'))?>
|
||||
<?=mk_option($display['banner'], "image",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
<div class="js-bannerSettings" markdown="1" style="display:none">
|
||||
_(Custom banner)_:
|
||||
<input type="hidden" name="#custom" value="">
|
||||
: <span id="dropbox">
|
||||
<?if (file_exists($banner)):?>
|
||||
<img src="<?=autov($banner)?>" width="330" height="30" onclick="$('#drop').click()" style="cursor:pointer" title="_(Click to select PNG file)_"><?=$icon?>
|
||||
<?else:?>
|
||||
<?=$void?>
|
||||
<?endif;?>
|
||||
</span><em>_(Drag-n-drop a PNG file or click the image at the left)_.</em><input type="file" id="drop" accept="image/*" style="display:none">
|
||||
|
||||
:display_custom_banner_help:
|
||||
</div>
|
||||
|
||||
<div class="js-bannerSettings" markdown="1" style="display:none">
|
||||
_(Show banner background color fade)_:
|
||||
: <select name="showBannerGradient">
|
||||
<?=mk_option($display['showBannerGradient'], "no",_('No'))?>
|
||||
<?=mk_option($display['showBannerGradient'], "yes",_('Yes'))?>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
_(Favorites enabled)_:
|
||||
: <select name="favorites">
|
||||
<?=mk_option($display['favorites'], "yes",_('Yes'))?>
|
||||
<?=mk_option($display['favorites'], "no",_('No'))?>
|
||||
</select>
|
||||
|
||||
:display_favorites_enabled_help:
|
||||
|
||||
_(Allow realtime updates on inactive browsers)_:
|
||||
: <select name='liveUpdate'>
|
||||
<?=mk_option($display['liveUpdate'],"no",_('No'))?>
|
||||
<?=mk_option($display['liveUpdate'],"yes",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
<input type="submit" name="#default" value="_(Default)_" onclick="filename='reset'">
|
||||
: <input type="submit" name="#apply" value="_(Apply)_" disabled><input type="button" value="_(Done)_" onclick="done()">
|
||||
</form>
|
||||
@@ -8,6 +8,7 @@ import { describe, expect, test, vi } from 'vitest';
|
||||
import { FileModification } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
|
||||
import AuthRequestModification from '@app/unraid-api/unraid-file-modifier/modifications/auth-request.modification.js';
|
||||
import DefaultPageLayoutModification from '@app/unraid-api/unraid-file-modifier/modifications/default-page-layout.modification.js';
|
||||
import DisplaySettingsModification from '@app/unraid-api/unraid-file-modifier/modifications/display-settings.modification.js';
|
||||
import NotificationsPageModification from '@app/unraid-api/unraid-file-modifier/modifications/notifications-page.modification.js';
|
||||
import RcNginxModification from '@app/unraid-api/unraid-file-modifier/modifications/rc-nginx.modification.js';
|
||||
import SSOFileModification from '@app/unraid-api/unraid-file-modifier/modifications/sso.modification.js';
|
||||
@@ -35,6 +36,12 @@ const patchTestCases: ModificationTestCase[] = [
|
||||
'https://raw.githubusercontent.com/unraid/webgui/refs/heads/7.1/emhttp/plugins/dynamix/Notifications.page',
|
||||
fileName: 'Notifications.page',
|
||||
},
|
||||
{
|
||||
ModificationClass: DisplaySettingsModification,
|
||||
fileUrl:
|
||||
'https://raw.githubusercontent.com/unraid/webgui/refs/heads/7.1/emhttp/plugins/dynamix/DisplaySettings.page',
|
||||
fileName: 'DisplaySettings.page',
|
||||
},
|
||||
{
|
||||
ModificationClass: SSOFileModification,
|
||||
fileUrl:
|
||||
|
||||
@@ -0,0 +1,334 @@
|
||||
Menu="UserPreferences"
|
||||
Title="Display Settings"
|
||||
Icon="icon-display"
|
||||
Tag="desktop"
|
||||
---
|
||||
<?PHP
|
||||
/* Copyright 2005-2025, Lime Technology
|
||||
* Copyright 2012-2025, Bergware International.
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License version 2,
|
||||
* as published by the Free Software Foundation.
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*/
|
||||
?>
|
||||
<?
|
||||
$void = "<img src='/webGui/images/banner.png' id='image' width='330' height='30' onclick='$("#drop").click()' style='cursor:pointer' title='_(Click to select PNG file)_'>";
|
||||
$icon = "<i class='fa fa-trash top' title='_(Restore default image)_' onclick='restore()'></i>";
|
||||
$plugins = '/var/log/plugins';
|
||||
|
||||
require_once "$docroot/plugins/dynamix.plugin.manager/include/PluginHelpers.php";
|
||||
?>
|
||||
<script src="<?autov('/webGui/javascript/jquery.filedrop.js')?>"></script>
|
||||
<script>
|
||||
var path = '/boot/config/plugins/dynamix';
|
||||
var filename = '';
|
||||
var locale = "<?=$locale?>";
|
||||
|
||||
function restore() {
|
||||
// restore original image and activate APPLY button
|
||||
$('#dropbox').html("<?=$void?>");
|
||||
$('select[name="banner"]').trigger('change');
|
||||
filename = 'reset';
|
||||
}
|
||||
function upload(lang) {
|
||||
// save or delete upload when APPLY is pressed
|
||||
if (filename=='reset') {
|
||||
$.post("/webGui/include/FileUpload.php",{cmd:'delete',path:path,filename:'banner.png'});
|
||||
} else if (filename) {
|
||||
$.post("/webGui/include/FileUpload.php",{cmd:'save',path:path,filename:filename,output:'banner.png'});
|
||||
}
|
||||
// reset dashboard tiles when switching language
|
||||
if (lang != locale) {
|
||||
$.removeCookie('db-box1');
|
||||
$.removeCookie('db-box2');
|
||||
$.removeCookie('db-box3');
|
||||
$.removeCookie('inactive_content');
|
||||
$.removeCookie('hidden_content');
|
||||
}
|
||||
}
|
||||
function presetBanner(form) {
|
||||
if (form.banner.selectedIndex == 0) $('.js-bannerSettings').hide(); else $('.js-bannerSettings').show();
|
||||
}
|
||||
function presetRefresh(form) {
|
||||
for (var i=0,item; item=form.refresh.options[i]; i++) item.value *= -1;
|
||||
}
|
||||
function presetPassive(index) {
|
||||
if (index==0) $('#passive').hide(); else $('#passive').show();
|
||||
}
|
||||
function updateDirection(lang) {
|
||||
// var rtl = ['ar_AR','fa_FA'].includes(lang) ? "dir='rtl' " : "";
|
||||
// RTL display is not giving the desired results, we keep LTR
|
||||
var rtl = "";
|
||||
$('input[name="rtl"]').val(rtl);
|
||||
}
|
||||
|
||||
$(function() {
|
||||
var dropbox = $('#dropbox');
|
||||
// attach the drag-n-drop feature to the 'dropbox' element
|
||||
dropbox.filedrop({
|
||||
maxfiles:1,
|
||||
maxfilesize:512, // KB
|
||||
data: {"csrf_token": "<?=$var['csrf_token']?>"},
|
||||
url:'/webGui/include/FileUpload.php',
|
||||
beforeEach:function(file) {
|
||||
if (!file.type.match(/^image\/.*/)) {
|
||||
swal({title:"_(Warning)_",text:"_(Only PNG images are allowed)_!",type:"warning",html:true,confirmButtonText:"_(Ok)_"});
|
||||
return false;
|
||||
}
|
||||
},
|
||||
error: function(err, file, i) {
|
||||
switch (err) {
|
||||
case 'BrowserNotSupported':
|
||||
swal({title:"_(Browser error)_",text:"_(Your browser does not support HTML5 file uploads)_!",type:"error",html:true,confirmButtonText:"_(Ok)_"});
|
||||
break;
|
||||
case 'TooManyFiles':
|
||||
swal({title:"_(Too many files)_",text:"_(Please select one file only)_!",html:true,type:"error"});
|
||||
break;
|
||||
case 'FileTooLarge':
|
||||
swal({title:"_(File too large)_",text:"_(Maximum file upload size is 512K)_ (524,288 _(bytes)_)",type:"error",html:true,confirmButtonText:"_(Ok)_"});
|
||||
break;
|
||||
}
|
||||
},
|
||||
uploadStarted:function(i,file,count) {
|
||||
var image = $('img', $(dropbox));
|
||||
var reader = new FileReader();
|
||||
image.width = 330;
|
||||
image.height = 30;
|
||||
reader.onload = function(e){image.attr('src',e.target.result);};
|
||||
reader.readAsDataURL(file);
|
||||
},
|
||||
uploadFinished:function(i,file,response) {
|
||||
if (response == 'OK 200') {
|
||||
if (!filename || filename=='reset') $(dropbox).append("<?=$icon?>");
|
||||
$('select[name="banner"]').trigger('change');
|
||||
filename = file.name;
|
||||
} else {
|
||||
swal({title:"_(Upload error)_",text:response,type:"error",html:true,confirmButtonText:"_(Ok)_"});
|
||||
}
|
||||
}
|
||||
});
|
||||
// simulate a drop action when manual file selection is done
|
||||
$('#drop').bind('change', function(e) {
|
||||
var files = e.target.files;
|
||||
if ($('#dropbox').triggerHandler({type:'drop',dataTransfer:{files:files}})==false) e.stopImmediatePropagation();
|
||||
});
|
||||
presetBanner(document.display_settings);
|
||||
});
|
||||
</script>
|
||||
|
||||
:display_settings_help:
|
||||
|
||||
<form markdown="1" name="display_settings" method="POST" action="/update.php" target="progressFrame" onsubmit="upload(this.locale.value)">
|
||||
<input type="hidden" name="#file" value="dynamix/dynamix.cfg">
|
||||
<input type="hidden" name="#section" value="display">
|
||||
<input type="hidden" name="rtl" value="<?=$display['rtl']?>">
|
||||
|
||||
_(Display width)_:
|
||||
: <select name="width">
|
||||
<?=mk_option($display['width'], "",_('Boxed'))?>
|
||||
<?=mk_option($display['width'], "1",_('Unlimited'))?>
|
||||
</select>
|
||||
|
||||
:display_width_help:
|
||||
|
||||
_(Language)_:
|
||||
: <select name="locale" onchange="updateDirection(this.value)">
|
||||
<?echo mk_option($display['locale'], "","English");
|
||||
foreach (glob("$plugins/lang-*.xml",GLOB_NOSORT) as $xml_file) {
|
||||
$lang = language('Language', $xml_file);
|
||||
$home = language('LanguageLocal', $xml_file);
|
||||
$name = language('LanguagePack', $xml_file);
|
||||
echo mk_option($display['locale'], $name, "$home ($lang)");
|
||||
}
|
||||
?></select>
|
||||
|
||||
_(Font size)_:
|
||||
: <select name="font" id='font'>
|
||||
<?=mk_option($display['font'], "50",_('Very small'))?>
|
||||
<?=mk_option($display['font'], "56.25",_('Small'))?>
|
||||
<?=mk_option($display['font'], "",_('Normal'))?>
|
||||
<?=mk_option($display['font'], "68.75",_('Large'))?>
|
||||
<?=mk_option($display['font'], "75",_('Very large'))?>
|
||||
<?=mk_option($display['font'], "80",_('Huge'))?>
|
||||
</select>
|
||||
|
||||
:display_font_size_help:
|
||||
|
||||
_(Terminal font size)_:
|
||||
: <select name="tty" id="tty">
|
||||
<?=mk_option($display['tty'], "11",_('Very small'))?>
|
||||
<?=mk_option($display['tty'], "13",_('Small'))?>
|
||||
<?=mk_option($display['tty'], "15",_('Normal'))?>
|
||||
<?=mk_option($display['tty'], "17",_('Large'))?>
|
||||
<?=mk_option($display['tty'], "19",_('Very large'))?>
|
||||
<?=mk_option($display['tty'], "21",_('Huge'))?>
|
||||
</select>
|
||||
|
||||
:display_tty_size_help:
|
||||
|
||||
_(Number format)_:
|
||||
: <select name="number">
|
||||
<?=mk_option($display['number'], ".,",_('[D] dot : [G] comma'))?>
|
||||
<?=mk_option($display['number'], ". ",_('[D] dot : [G] space'))?>
|
||||
<?=mk_option($display['number'], ".",_('[D] dot : [G] none'))?>
|
||||
<?=mk_option($display['number'], ",.",_('[D] comma : [G] dot'))?>
|
||||
<?=mk_option($display['number'], ", ",_('[D] comma : [G] space'))?>
|
||||
<?=mk_option($display['number'], ",",_('[D] comma : [G] none'))?>
|
||||
</select>
|
||||
|
||||
_(Number scaling)_:
|
||||
: <select name="scale">
|
||||
<?=mk_option($display['scale'], "-1",_('Automatic'))?>
|
||||
<?=mk_option($display['scale'], "0",_('Disabled'))?>
|
||||
<?=mk_option($display['scale'], "1",_('KB'))?>
|
||||
<?=mk_option($display['scale'], "2",_('MB'))?>
|
||||
<?=mk_option($display['scale'], "3",_('GB'))?>
|
||||
<?=mk_option($display['scale'], "4",_('TB'))?>
|
||||
<?=mk_option($display['scale'], "5",_('PB'))?>
|
||||
</select>
|
||||
|
||||
_(Page view)_:
|
||||
: <select name="tabs">
|
||||
<?=mk_option($display['tabs'], "0",_('Tabbed'))?>
|
||||
<?=mk_option($display['tabs'], "1",_('Non-tabbed'))?>
|
||||
</select>
|
||||
|
||||
:display_page_view_help:
|
||||
|
||||
_(Placement of Users menu)_:
|
||||
: <select name="users">
|
||||
<?=mk_option($display['users'], "Tasks:3",_('Header menu'))?>
|
||||
<?=mk_option($display['users'], "UserPreferences",_('Settings menu'))?>
|
||||
</select>
|
||||
|
||||
:display_users_menu_help:
|
||||
|
||||
_(Listing height)_:
|
||||
: <select name="resize">
|
||||
<?=mk_option($display['resize'], "0",_('Automatic'))?>
|
||||
<?=mk_option($display['resize'], "1",_('Fixed'))?>
|
||||
</select>
|
||||
|
||||
:display_listing_height_help:
|
||||
|
||||
_(Display device name)_:
|
||||
: <select name="raw">
|
||||
<?=mk_option($display['raw'], "",_('Normalized'))?>
|
||||
<?=mk_option($display['raw'], "1",_('Raw'))?>
|
||||
</select>
|
||||
|
||||
_(Display world-wide-name in device ID)_:
|
||||
: <select name="wwn">
|
||||
<?=mk_option($display['wwn'], "0",_('Disabled'))?>
|
||||
<?=mk_option($display['wwn'], "1",_('Automatic'))?>
|
||||
</select>
|
||||
|
||||
:display_wwn_device_id_help:
|
||||
|
||||
_(Display array totals)_:
|
||||
: <select name="total">
|
||||
<?=mk_option($display['total'], "0",_('No'))?>
|
||||
<?=mk_option($display['total'], "1",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
_(Show array utilization indicator)_:
|
||||
: <select name="usage">
|
||||
<?=mk_option($display['usage'], "0",_('No'))?>
|
||||
<?=mk_option($display['usage'], "1",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
_(Temperature unit)_:
|
||||
: <select name="unit">
|
||||
<?=mk_option($display['unit'], "C",_('Celsius'))?>
|
||||
<?=mk_option($display['unit'], "F",_('Fahrenheit'))?>
|
||||
</select>
|
||||
|
||||
:display_temperature_unit_help:
|
||||
|
||||
_(Dynamix color theme)_:
|
||||
: <select name="theme">
|
||||
<?foreach (glob("$docroot/webGui/styles/themes/*.css") as $themes):?>
|
||||
<?$theme = basename($themes, '.css');?>
|
||||
<?=mk_option($display['theme'], $theme, _(ucfirst($theme)))?>
|
||||
<?endforeach;?>
|
||||
</select>
|
||||
|
||||
_(Used / Free columns)_:
|
||||
: <select name="text">
|
||||
<?=mk_option($display['text'], "0",_('Text'))?>
|
||||
<?=mk_option($display['text'], "1",_('Bar (gray)'))?>
|
||||
<?=mk_option($display['text'], "2",_('Bar (color)'))?>
|
||||
<?=mk_option($display['text'], "10",_('Text - Bar (gray)'))?>
|
||||
<?=mk_option($display['text'], "20",_('Text - Bar (color)'))?>
|
||||
<?=mk_option($display['text'], "11",_('Bar (gray) - Text'))?>
|
||||
<?=mk_option($display['text'], "21",_('Bar (color) - Text'))?>
|
||||
</select>
|
||||
|
||||
_(Header custom text color)_:
|
||||
: <input type="text" class="narrow" name="header" value="<?=$display['header']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
|
||||
|
||||
:display_custom_text_color_help:
|
||||
|
||||
_(Header custom secondary text color)_:
|
||||
: <input type="text" class="narrow" name="headermetacolor" value="<?=$display['headermetacolor']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
|
||||
|
||||
_(Header custom background color)_:
|
||||
: <input type="text" class="narrow" name="background" value="<?=$display['background']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
|
||||
|
||||
:display_custom_background_color_help:
|
||||
|
||||
_(Header show description)_:
|
||||
: <select name="headerdescription">
|
||||
<?=mk_option($display['headerdescription'], "yes",_('Yes'))?>
|
||||
<?=mk_option($display['headerdescription'], "no",_('No'))?>
|
||||
</select>
|
||||
|
||||
_(Show banner)_:
|
||||
: <select name="banner" onchange="presetBanner(this.form)">
|
||||
<?=mk_option($display['banner'], "",_('No'))?>
|
||||
<?=mk_option($display['banner'], "image",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
<div class="js-bannerSettings" markdown="1" style="display:none">
|
||||
_(Custom banner)_:
|
||||
<input type="hidden" name="#custom" value="">
|
||||
: <span id="dropbox">
|
||||
<?if (file_exists($banner)):?>
|
||||
<img src="<?=autov($banner)?>" width="330" height="30" onclick="$('#drop').click()" style="cursor:pointer" title="_(Click to select PNG file)_"><?=$icon?>
|
||||
<?else:?>
|
||||
<?=$void?>
|
||||
<?endif;?>
|
||||
</span><em>_(Drag-n-drop a PNG file or click the image at the left)_.</em><input type="file" id="drop" accept="image/*" style="display:none">
|
||||
|
||||
:display_custom_banner_help:
|
||||
</div>
|
||||
|
||||
<div class="js-bannerSettings" markdown="1" style="display:none">
|
||||
_(Show banner background color fade)_:
|
||||
: <select name="showBannerGradient">
|
||||
<?=mk_option($display['showBannerGradient'], "no",_('No'))?>
|
||||
<?=mk_option($display['showBannerGradient'], "yes",_('Yes'))?>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
_(Favorites enabled)_:
|
||||
: <select name="favorites">
|
||||
<?=mk_option($display['favorites'], "yes",_('Yes'))?>
|
||||
<?=mk_option($display['favorites'], "no",_('No'))?>
|
||||
</select>
|
||||
|
||||
:display_favorites_enabled_help:
|
||||
|
||||
_(Allow realtime updates on inactive browsers)_:
|
||||
: <select name='liveUpdate'>
|
||||
<?=mk_option($display['liveUpdate'],"no",_('No'))?>
|
||||
<?=mk_option($display['liveUpdate'],"yes",_('Yes'))?>
|
||||
</select>
|
||||
|
||||
<input type="submit" name="#default" value="_(Default)_" onclick="filename='reset'">
|
||||
: <input type="submit" name="#apply" value="_(Apply)_" disabled><input type="button" value="_(Done)_" onclick="done()">
|
||||
</form>
|
||||
@@ -14,13 +14,13 @@ export default class AuthRequestModification extends FileModification {
|
||||
id: string = 'auth-request';
|
||||
|
||||
/**
|
||||
* Get the list of .js files in the given directory
|
||||
* @param dir - The directory to search for .js files
|
||||
* @returns The list of .js files in the given directory
|
||||
* Get the list of .js and .css files in the given directory
|
||||
* @param dir - The directory to search for .js and .css files
|
||||
* @returns The list of .js and .css files in the given directory
|
||||
*/
|
||||
private getJsFiles = async (dir: string) => {
|
||||
private getAssetFiles = async (dir: string) => {
|
||||
const { glob } = await import('glob');
|
||||
const files = await glob(join(dir, '**/*.js'));
|
||||
const files = await glob(join(dir, '**/*.{js,css}'));
|
||||
const baseDir = '/usr/local/emhttp';
|
||||
return files.map((file) => (file.startsWith(baseDir) ? file.slice(baseDir.length) : file));
|
||||
};
|
||||
@@ -33,6 +33,30 @@ export default class AuthRequestModification extends FileModification {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this modification should be applied based on Unraid version
|
||||
* Only apply for Unraid versions up to 7.2.0-beta.2.3
|
||||
*/
|
||||
async shouldApply(): Promise<ShouldApplyWithReason> {
|
||||
// Apply for versions up to and including 7.2.0-beta.2.3
|
||||
const maxVersion = '7.2.0-beta.2.3';
|
||||
const isCompatibleVersion = await this.isUnraidVersionLessThanOrEqualTo(maxVersion, {
|
||||
includePrerelease: true,
|
||||
});
|
||||
|
||||
if (!isCompatibleVersion) {
|
||||
return {
|
||||
shouldApply: false,
|
||||
reason: `Auth request modification only applies to Unraid versions up to ${maxVersion}`,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
shouldApply: true,
|
||||
reason: `Auth request modification needed for Unraid version <= ${maxVersion}`,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a patch for the auth-request.php file
|
||||
* @param overridePath - The path to override the default file path
|
||||
@@ -40,10 +64,12 @@ export default class AuthRequestModification extends FileModification {
|
||||
*/
|
||||
protected async generatePatch(overridePath?: string): Promise<string> {
|
||||
const { getters } = await import('@app/store/index.js');
|
||||
const jsFiles = await this.getJsFiles(this.webComponentsDirectory);
|
||||
this.logger.debug(`Found ${jsFiles.length} .js files in ${this.webComponentsDirectory}`);
|
||||
const assetFiles = await this.getAssetFiles(this.webComponentsDirectory);
|
||||
this.logger.debug(
|
||||
`Found ${assetFiles.length} asset files (.js and .css) in ${this.webComponentsDirectory}`
|
||||
);
|
||||
|
||||
const filesToAdd = [getters.paths().webgui.logo.assetPath, ...jsFiles];
|
||||
const filesToAdd = [getters.paths().webgui.logo.assetPath, ...assetFiles];
|
||||
|
||||
if (!(await fileExists(this.filePath))) {
|
||||
throw new Error(`File ${this.filePath} not found.`);
|
||||
|
||||
@@ -0,0 +1,51 @@
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
import {
|
||||
FileModification,
|
||||
ShouldApplyWithReason,
|
||||
} from '@app/unraid-api/unraid-file-modifier/file-modification.js';
|
||||
|
||||
export default class DisplaySettingsModification extends FileModification {
|
||||
id: string = 'display-settings';
|
||||
public readonly filePath: string = '/usr/local/emhttp/plugins/dynamix/DisplaySettings.page';
|
||||
|
||||
private removeFixedClassFromLanguageSelect(source: string): string {
|
||||
// Find lines with locale select and remove class="fixed" from them
|
||||
return source
|
||||
.split('\n')
|
||||
.map((line) => {
|
||||
// Check if this line contains the locale select element
|
||||
if (line.includes('<select name="locale"') && line.includes('class="fixed"')) {
|
||||
// Remove class="fixed" from the line, handling potential spacing variations
|
||||
return line.replace(/\s*class="fixed"\s*/, ' ').replace(/\s+/g, ' ');
|
||||
}
|
||||
return line;
|
||||
})
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
private applyToSource(fileContent: string): string {
|
||||
const transformers = [this.removeFixedClassFromLanguageSelect.bind(this)];
|
||||
|
||||
return transformers.reduce((content, transformer) => transformer(content), fileContent);
|
||||
}
|
||||
|
||||
protected async generatePatch(overridePath?: string): Promise<string> {
|
||||
const fileContent = await readFile(this.filePath, 'utf-8');
|
||||
|
||||
const newContent = await this.applyToSource(fileContent);
|
||||
|
||||
return this.createPatchWithDiff(overridePath ?? this.filePath, fileContent, newContent);
|
||||
}
|
||||
|
||||
async shouldApply(): Promise<ShouldApplyWithReason> {
|
||||
const superShouldApply = await super.shouldApply();
|
||||
if (!superShouldApply.shouldApply) {
|
||||
return superShouldApply;
|
||||
}
|
||||
return {
|
||||
shouldApply: true,
|
||||
reason: 'Display settings modification needed for Unraid version <= 7.2.0-beta.2.3',
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
Index: /usr/local/emhttp/plugins/dynamix/DisplaySettings.page
|
||||
===================================================================
|
||||
--- /usr/local/emhttp/plugins/dynamix/DisplaySettings.page original
|
||||
+++ /usr/local/emhttp/plugins/dynamix/DisplaySettings.page modified
|
||||
@@ -134,11 +134,11 @@
|
||||
</select>
|
||||
|
||||
:display_width_help:
|
||||
|
||||
_(Language)_:
|
||||
-: <select name="locale" class="fixed" onchange="updateDirection(this.value)">
|
||||
+: <select name="locale" onchange="updateDirection(this.value)">
|
||||
<?echo mk_option($display['locale'], "","English");
|
||||
foreach (glob("$plugins/lang-*.xml",GLOB_NOSORT) as $xml_file) {
|
||||
$lang = language('Language', $xml_file);
|
||||
$home = language('LanguageLocal', $xml_file);
|
||||
$name = language('LanguagePack', $xml_file);
|
||||
28
api/src/unraid-api/utils/feature-flag.helper.ts
Normal file
28
api/src/unraid-api/utils/feature-flag.helper.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { ForbiddenException } from '@nestjs/common';
|
||||
|
||||
/**
|
||||
* Checks if a feature flag is enabled and throws an exception if disabled.
|
||||
* Use this at the beginning of resolver methods for immediate feature flag checks.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* @ResolveField(() => String)
|
||||
* async organizer() {
|
||||
* checkFeatureFlag(FeatureFlags, 'ENABLE_NEXT_DOCKER_RELEASE');
|
||||
* return this.dockerOrganizerService.resolveOrganizer();
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* @param flags - The feature flag object containing boolean/truthy values
|
||||
* @param key - The key within the feature flag object to check
|
||||
* @throws ForbiddenException if the feature flag is disabled
|
||||
*/
|
||||
export function checkFeatureFlag<T extends Record<string, any>>(flags: T, key: keyof T): void {
|
||||
const isEnabled = Boolean(flags[key]);
|
||||
|
||||
if (!isEnabled) {
|
||||
throw new ForbiddenException(
|
||||
`Feature "${String(key)}" is currently disabled. This functionality is not available at this time.`
|
||||
);
|
||||
}
|
||||
}
|
||||
332
api/src/unraid-api/utils/graphql-field-helper.spec.ts
Normal file
332
api/src/unraid-api/utils/graphql-field-helper.spec.ts
Normal file
@@ -0,0 +1,332 @@
|
||||
import { buildSchema, FieldNode, GraphQLResolveInfo, parse } from 'graphql';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';
|
||||
|
||||
// Unit tests for GraphQLFieldHelper. A minimal schema plus a hand-built
// GraphQLResolveInfo stand in for a real execution context, so each helper
// can be exercised against concrete query shapes.
describe('GraphQLFieldHelper', () => {
    // Schema declares only the types/fields referenced by the queries below.
    const schema = buildSchema(`
        type User {
            id: String
            name: String
            email: String
            profile: Profile
            posts: [Post]
            settings: Settings
        }

        type Profile {
            avatar: String
            bio: String
        }

        type Post {
            title: String
            content: String
        }

        type Settings {
            theme: String
            language: String
        }

        type Query {
            user: User
            users: [User]
        }
    `);

    // Build a just-enough GraphQLResolveInfo from a query string: the first
    // selection of the first operation becomes the resolved field. Fields the
    // helpers never read (fragments, rootValue, ...) are stubbed empty.
    const createMockInfo = (query: string): GraphQLResolveInfo => {
        const document = parse(query);
        const operation = document.definitions[0] as any;
        const fieldNode = operation.selectionSet.selections[0] as FieldNode;

        return {
            fieldName: fieldNode.name.value,
            fieldNodes: [fieldNode],
            returnType: schema.getType('User') as any,
            parentType: schema.getType('Query') as any,
            path: { prev: undefined, key: fieldNode.name.value, typename: 'Query' },
            schema,
            fragments: {},
            rootValue: {},
            operation,
            variableValues: {},
        } as GraphQLResolveInfo;
    };

    describe('getRequestedFields', () => {
        it('should return flat fields structure', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                        email
                    }
                }
            `);

            const fields = GraphQLFieldHelper.getRequestedFields(mockInfo);

            // Leaf selections are represented as empty objects.
            expect(fields).toEqual({
                id: {},
                name: {},
                email: {},
            });
        });

        it('should return nested fields structure', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        profile {
                            avatar
                            bio
                        }
                        settings {
                            theme
                            language
                        }
                    }
                }
            `);

            const fields = GraphQLFieldHelper.getRequestedFields(mockInfo);

            expect(fields).toEqual({
                id: {},
                profile: {
                    avatar: {},
                    bio: {},
                },
                settings: {
                    theme: {},
                    language: {},
                },
            });
        });
    });

    describe('isFieldRequested', () => {
        it('should return true for requested top-level field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                        email
                    }
                }
            `);

            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'id')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'name')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'email')).toBe(true);
        });

        it('should return false for non-requested field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'email')).toBe(false);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile')).toBe(false);
        });

        it('should handle nested field paths', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                        }
                    }
                }
            `);

            // Dot-separated paths walk into nested selections.
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile.avatar')).toBe(true);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'profile.bio')).toBe(false);
            expect(GraphQLFieldHelper.isFieldRequested(mockInfo, 'settings')).toBe(false);
        });
    });

    describe('getRequestedFieldsList', () => {
        it('should return list of top-level field names', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                        email
                        profile {
                            avatar
                        }
                    }
                }
            `);

            const fieldsList = GraphQLFieldHelper.getRequestedFieldsList(mockInfo);

            expect(fieldsList).toEqual(['id', 'name', 'email', 'profile']);
        });

        it('should return empty array for no fields', () => {
            const mockInfo = createMockInfo(`
                query {
                    user
                }
            `);

            const fieldsList = GraphQLFieldHelper.getRequestedFieldsList(mockInfo);

            expect(fieldsList).toEqual([]);
        });
    });

    describe('hasNestedFields', () => {
        it('should return true when field has nested selections', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                            bio
                        }
                    }
                }
            `);

            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'profile')).toBe(true);
        });

        it('should return false when field has no nested selections', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'id')).toBe(false);
            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'name')).toBe(false);
        });

        it('should return false for non-existent field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                    }
                }
            `);

            expect(GraphQLFieldHelper.hasNestedFields(mockInfo, 'profile')).toBe(false);
        });
    });

    describe('getNestedFields', () => {
        it('should return nested fields object', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                            bio
                        }
                    }
                }
            `);

            const nestedFields = GraphQLFieldHelper.getNestedFields(mockInfo, 'profile');

            expect(nestedFields).toEqual({
                avatar: {},
                bio: {},
            });
        });

        it('should return null for field without nested selections', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            // Leaf selections ({}) are reported as null, not an empty object.
            expect(GraphQLFieldHelper.getNestedFields(mockInfo, 'id')).toBeNull();
            expect(GraphQLFieldHelper.getNestedFields(mockInfo, 'name')).toBeNull();
        });

        it('should return null for non-existent field', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                    }
                }
            `);

            expect(GraphQLFieldHelper.getNestedFields(mockInfo, 'profile')).toBeNull();
        });
    });

    describe('shouldFetchRelation', () => {
        it('should return true when relation is requested with nested fields', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        profile {
                            avatar
                        }
                        posts {
                            title
                            content
                        }
                    }
                }
            `);

            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'profile')).toBe(true);
            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'posts')).toBe(true);
        });

        it('should return false when relation has no nested fields', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'id')).toBe(false);
            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'name')).toBe(false);
        });

        it('should return false when relation is not requested', () => {
            const mockInfo = createMockInfo(`
                query {
                    user {
                        id
                        name
                    }
                }
            `);

            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'profile')).toBe(false);
            expect(GraphQLFieldHelper.shouldFetchRelation(mockInfo, 'posts')).toBe(false);
        });
    });
});
|
||||
63
api/src/unraid-api/utils/graphql-field-helper.ts
Normal file
63
api/src/unraid-api/utils/graphql-field-helper.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import type { GraphQLResolveInfo } from 'graphql';
|
||||
import graphqlFields from 'graphql-fields';
|
||||
|
||||
export interface RequestedFields {
|
||||
[key: string]: RequestedFields | {};
|
||||
}
|
||||
|
||||
export interface GraphQLFieldOptions {
|
||||
processArguments?: boolean;
|
||||
excludedFields?: string[];
|
||||
}
|
||||
|
||||
export class GraphQLFieldHelper {
|
||||
static getRequestedFields(info: GraphQLResolveInfo, options?: GraphQLFieldOptions): RequestedFields {
|
||||
return graphqlFields(info, {}, options);
|
||||
}
|
||||
|
||||
static isFieldRequested(info: GraphQLResolveInfo, fieldPath: string): boolean {
|
||||
const fields = this.getRequestedFields(info);
|
||||
const pathParts = fieldPath.split('.');
|
||||
|
||||
let current: RequestedFields | {} = fields;
|
||||
for (const part of pathParts) {
|
||||
if (!(part in current)) {
|
||||
return false;
|
||||
}
|
||||
current = current[part as keyof typeof current] as RequestedFields | {};
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static getRequestedFieldsList(info: GraphQLResolveInfo): string[] {
|
||||
const fields = this.getRequestedFields(info);
|
||||
return Object.keys(fields);
|
||||
}
|
||||
|
||||
static hasNestedFields(info: GraphQLResolveInfo, fieldName: string): boolean {
|
||||
const fields = this.getRequestedFields(info);
|
||||
const field = fields[fieldName];
|
||||
return field !== undefined && Object.keys(field).length > 0;
|
||||
}
|
||||
|
||||
static getNestedFields(info: GraphQLResolveInfo, fieldName: string): RequestedFields | null {
|
||||
const fields = this.getRequestedFields(info);
|
||||
const field = fields[fieldName];
|
||||
|
||||
if (!field || typeof field !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// graphql-fields returns {} for fields without nested selections
|
||||
if (Object.keys(field).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return field as RequestedFields;
|
||||
}
|
||||
|
||||
static shouldFetchRelation(info: GraphQLResolveInfo, relationName: string): boolean {
|
||||
return this.isFieldRequested(info, relationName) && this.hasNestedFields(info, relationName);
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,6 @@
|
||||
import { existsSync, readFileSync } from 'node:fs';
|
||||
import { basename, join } from 'node:path';
|
||||
|
||||
import type { ViteUserConfig } from 'vitest/config';
|
||||
import { viteCommonjs } from '@originjs/vite-plugin-commonjs';
|
||||
import nodeResolve from '@rollup/plugin-node-resolve';
|
||||
@@ -70,6 +73,29 @@ export default defineConfig(({ mode }): ViteUserConfig => {
|
||||
},
|
||||
},
|
||||
}),
|
||||
// Copy PHP files to assets directory
|
||||
{
|
||||
name: 'copy-php-files',
|
||||
buildStart() {
|
||||
const phpFiles = ['src/core/utils/plugins/wrapper.php'];
|
||||
phpFiles.forEach((file) => this.addWatchFile(file));
|
||||
},
|
||||
async generateBundle() {
|
||||
const phpFiles = ['src/core/utils/plugins/wrapper.php'];
|
||||
phpFiles.forEach((file) => {
|
||||
if (!existsSync(file)) {
|
||||
this.warn(`[copy-php-files] PHP file ${file} does not exist`);
|
||||
return;
|
||||
}
|
||||
const content = readFileSync(file);
|
||||
this.emitFile({
|
||||
type: 'asset',
|
||||
fileName: join('assets', basename(file)),
|
||||
source: content,
|
||||
});
|
||||
});
|
||||
},
|
||||
},
|
||||
],
|
||||
define: {
|
||||
// Allows vite to preserve process.env variables and not hardcode them
|
||||
|
||||
15
package.json
15
package.json
@@ -1,14 +1,15 @@
|
||||
{
|
||||
"name": "unraid-monorepo",
|
||||
"private": true,
|
||||
"version": "4.18.2",
|
||||
"version": "4.22.2",
|
||||
"scripts": {
|
||||
"build": "pnpm -r build",
|
||||
"build:watch": " pnpm -r --parallel build:watch",
|
||||
"build:watch": "pnpm -r --parallel --filter '!@unraid/ui' build:watch",
|
||||
"codegen": "pnpm -r codegen",
|
||||
"dev": "pnpm -r dev",
|
||||
"unraid:deploy": "pnpm -r unraid:deploy",
|
||||
"test": "pnpm -r test",
|
||||
"test:watch": "pnpm -r --parallel test:watch",
|
||||
"lint": "pnpm -r lint",
|
||||
"lint:fix": "pnpm -r lint:fix",
|
||||
"type-check": "pnpm -r type-check",
|
||||
@@ -62,8 +63,14 @@
|
||||
"pre-commit": "pnpm lint-staged"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.{js,jsx,ts,tsx,vue}": [
|
||||
"pnpm lint:fix"
|
||||
"api/**/*.{js,ts}": [
|
||||
"pnpm --filter api lint:fix"
|
||||
],
|
||||
"web/**/*.{js,ts,tsx,vue}": [
|
||||
"pnpm --filter web lint:fix"
|
||||
],
|
||||
"unraid-ui/**/*.{js,ts,tsx,vue}": [
|
||||
"pnpm --filter @unraid/ui lint:fix"
|
||||
]
|
||||
},
|
||||
"packageManager": "pnpm@10.15.0"
|
||||
|
||||
@@ -17,6 +17,7 @@ const config: CodegenConfig = {
|
||||
URL: 'URL',
|
||||
Port: 'number',
|
||||
UUID: 'string',
|
||||
BigInt: 'number',
|
||||
},
|
||||
scalarSchemas: {
|
||||
URL: 'z.instanceof(URL)',
|
||||
@@ -24,6 +25,7 @@ const config: CodegenConfig = {
|
||||
JSON: 'z.record(z.string(), z.any())',
|
||||
Port: 'z.number()',
|
||||
UUID: 'z.string()',
|
||||
BigInt: 'z.number()',
|
||||
},
|
||||
},
|
||||
generates: {
|
||||
|
||||
@@ -25,8 +25,8 @@
|
||||
"description": "Unraid Connect plugin for Unraid API",
|
||||
"devDependencies": {
|
||||
"@apollo/client": "3.14.0",
|
||||
"@faker-js/faker": "9.9.0",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@faker-js/faker": "10.0.0",
|
||||
"@graphql-codegen/cli": "6.0.0",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.3",
|
||||
"@jsonforms/core": "3.6.0",
|
||||
@@ -43,7 +43,7 @@
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/node": "22.18.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"camelcase-keys": "10.0.0",
|
||||
"class-transformer": "0.5.1",
|
||||
"class-validator": "0.14.2",
|
||||
"execa": "9.6.0",
|
||||
@@ -60,7 +60,7 @@
|
||||
"prettier": "3.6.2",
|
||||
"rimraf": "6.0.1",
|
||||
"rxjs": "7.8.2",
|
||||
"type-fest": "4.41.0",
|
||||
"type-fest": "5.0.0",
|
||||
"typescript": "5.9.2",
|
||||
"undici": "7.15.0",
|
||||
"vitest": "3.2.4",
|
||||
@@ -84,7 +84,7 @@
|
||||
"@nestjs/graphql": "13.1.0",
|
||||
"@nestjs/schedule": "6.0.0",
|
||||
"@runonflux/nat-upnp": "1.0.2",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"camelcase-keys": "10.0.0",
|
||||
"class-transformer": "0.5.1",
|
||||
"class-validator": "0.14.2",
|
||||
"execa": "9.6.0",
|
||||
|
||||
Some files were not shown because too many files have changed in this diff. Show More
Reference in New Issue
Block a user